add some features controlled by command line

2023-02-10 13:59:45 +11:00
parent 9fe154e5a7
commit 2dda0e3f14
10 changed files with 99 additions and 1677 deletions

BIN
.DS_Store vendored

Binary file not shown.

6
.gitignore vendored
View File

@@ -1,2 +1,8 @@
# Ignore any generated xlsx documents
*.xlsx
# Ignore compiled binary
json2excel
# Ignore test data
*.json

BIN
cmd/.DS_Store vendored

Binary file not shown.

View File

@@ -2,6 +2,7 @@ package main
import (
"encoding/json"
"flag"
"fmt"
"log"
"os"
@@ -13,10 +14,27 @@ import (
// Initial concept from https://stackoverflow.com/q/68621039
func main() {
jsonFile := "test.json"
//jsonFile := "test.json"
parentNode := "input"
sheetName := "Sheet2"
outputFilename := "test.xlsx"
//worksheetName := "Sheet2"
//outputFilename := "test.xlsx"
// Command line arguments
var inputJson string
var worksheetName string
var outputFilename string
var boldTopRow bool
var freezeTopRow bool
var autoFilter bool
// Process command line arguments
flag.StringVar(&inputJson, "inputJson", "./input.json", "Full path to input json data file")
flag.StringVar(&outputFilename, "outputFilename", "./output.xlsx", "Filename for excel worksheet output")
flag.StringVar(&worksheetName, "worksheetName", "Sheet1", "Label to set on worksheet")
flag.BoolVar(&boldTopRow, "bold-toprow", true, "Sets the top row of the worksheet to bold")
flag.BoolVar(&freezeTopRow, "freeze-toprow", true, "Freezes the first row of the Excel worksheet")
flag.BoolVar(&autoFilter, "autofilter", true, "Sets the auto filter on the first row")
flag.Parse()
var xlsx *excelize.File
var s []byte
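
With these flags in place, the compiled binary (json2excel, per the .gitignore above) can be run with any of the defaults overridden; the paths and worksheet name below are placeholders:

./json2excel -inputJson ./data.json -worksheetName Results -outputFilename ./report.xlsx -freeze-toprow=false

Omitting a flag falls back to the default values declared above.
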
@@ -27,7 +45,7 @@ func main() {
var cell string
var row, column int
// TODO - truncate sheetName to the maximum 31 characters
// TODO - truncate worksheetName to the maximum 31 characters
// Check if xlsx file exists already, and if it does then open and append data
if fileExists(outputFilename) {
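
The TODO just above about the 31-character limit could be handled with a small helper along these lines; this is only a sketch, the helper name is hypothetical, and it is not part of this commit:

// truncateSheetName trims a worksheet name to Excel's 31-character limit.
// Hypothetical helper, not part of this commit.
func truncateSheetName(name string) string {
    r := []rune(name)
    if len(r) > 31 {
        return string(r[:31])
    }
    return name
}
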
@@ -35,23 +53,23 @@ func main() {
xlsx, err = excelize.OpenFile(outputFilename)
if err != nil {
fmt.Println(err)
return
os.Exit(1)
}
// Since we have an existing workbook, check if the sheet we want to write to already exists
sheetFound := false
for index, name := range xlsx.GetSheetMap() {
if name == sheetName {
fmt.Printf("Found worksheet '%s' at index '%d'\n", sheetName, index)
if name == worksheetName {
fmt.Printf("Found worksheet '%s' at index '%d'\n", worksheetName, index)
sheetFound = true
}
}
if !sheetFound {
// Create the sheet
fmt.Printf("Creating worksheet '%s'\n", sheetName)
sheetIndex, err = xlsx.NewSheet(sheetName)
fmt.Printf("Creating worksheet '%s'\n", worksheetName)
sheetIndex, err = xlsx.NewSheet(worksheetName)
if err != nil {
fmt.Printf("Error creating worksheet '%s' : %s\n", sheetName, err)
fmt.Printf("Error creating worksheet '%s' : %s\n", worksheetName, err)
}
// Set active worksheet
fmt.Printf("Setting active sheet to index %d", sheetIndex)
@@ -64,26 +82,27 @@ func main() {
xlsx = excelize.NewFile()
// Rename the default Sheet1 to this worksheet name
if sheetName != "Sheet1" {
fmt.Printf("Renaming default worksheet to '%s'\n", sheetName)
err = xlsx.SetSheetName("Sheet1", sheetName)
if worksheetName != "Sheet1" {
fmt.Printf("Renaming default worksheet to '%s'\n", worksheetName)
err = xlsx.SetSheetName("Sheet1", worksheetName)
if err != nil {
fmt.Printf("Error setting sheet name to '%s': %s\n", sheetName, err)
fmt.Printf("Error setting sheet name to '%s': %s\n", worksheetName, err)
}
}
}
// Read the json input file
if fileExists(jsonFile) {
s, err = os.ReadFile(jsonFile)
if fileExists(inputJson) {
s, err = os.ReadFile(inputJson)
if err != nil {
panic(err)
}
} else {
fmt.Printf("Input JSON file '%s' does not exist.\n", jsonFile)
fmt.Printf("Input JSON file '%s' does not exist.\n", inputJson)
os.Exit(1)
}
// Read the json into an orderedmap to preserve the ordering of json structure
// Unmarshal the json into an orderedmap to preserve the ordering of json structure
o := orderedmap.New()
err = json.Unmarshal([]byte(s), &o)
if err != nil {
@@ -92,15 +111,15 @@ func main() {
// Assume that our content is within the first top-level key
topLevel := o.Keys()
fmt.Printf("topLevel: %v\n", topLevel)
fmt.Printf("Detected toplevel json key as: '%s'\n", topLevel[0])
parentNode = topLevel[0]
// Get a reference to the top level node we specified earlier
// TODO - validate this key exists
vislice, ok := o.Get(parentNode)
if !ok {
fmt.Printf("Missing key for multitype array")
}
// Get an interface that we can work with to access the sub elements
vslice := vislice.([]interface{})
// Get the keys for the first element so we know what the column names will be
@@ -108,22 +127,70 @@ func main() {
columnNames := columnMap.Keys()
fmt.Printf("Creating excel workbook with following headings : '%v'\n", columnNames)
// Add the header row
// Set the style for the header values
// Just handling bold for now but we can do other styles too as per https://xuri.me/excelize/en/style.html#NewStyle
headerStyle, err2 := xlsx.NewStyle(&excelize.Style{
Font: &excelize.Font{
Bold: boldTopRow,
},
})
if err2 != nil {
fmt.Printf("Error generating header style : '%s'\n", err2)
}
row = 1
column = 1
// Set the style
err = xlsx.SetRowStyle(worksheetName, row, row, headerStyle)
if err != nil {
fmt.Printf("Error setting header style : '%s'\n", err)
}
// Add the header row
for i := 0; i < len(columnNames); i++ {
cell, _ = excelize.CoordinatesToCellName(column, row)
fmt.Printf("Setting cell %s to value %s\n", cell, columnNames[i])
xlsx.SetCellValue(sheetName, cell, columnNames[i])
xlsx.SetCellValue(worksheetName, cell, columnNames[i])
//xlsx.SetCellStyle(worksheetName, cell, cell, headerStyle)
column++
}
// Freeze top row if requested, see https://xuri.me/excelize/en/utils.html#SetPanes
if freezeTopRow {
err = xlsx.SetPanes(worksheetName, &excelize.Panes{
Freeze: true,
Split: false,
XSplit: 0,
YSplit: 1,
TopLeftCell: "A2",
ActivePane: "bottomLeft",
Panes: []excelize.PaneOptions{
{SQRef: "A2", ActiveCell: "A2", Pane: "bottomLeft"},
},
})
if err != nil {
fmt.Printf("Error freezing top row : '%s'\n", err)
}
}
// Handle autofilter
if autoFilter {
// cell is still a reference to the last cell in the header row
filterRange := "A1:" + cell
fmt.Printf("Setting autofilter to range '%s'\n", filterRange)
err = xlsx.AutoFilter(worksheetName, filterRange, nil)
if err != nil {
fmt.Printf("Error setting autofilter : '%s'\n", err)
}
}
// Now process the remaining data in our json input
// Set starting row for data
row = 2
fmt.Printf("Adding %d rows of data to spreadsheet.\n", len(vslice))
// Iterate the whole slice to get the data and add to the worksheet
fmt.Printf("Adding %d rows of data to spreadsheet.\n", len(vslice))
for i, v := range vslice {
// Print the contents of each slice
//fmt.Printf("'%d' : '%v'\n", i, v)
@@ -145,7 +212,7 @@ func main() {
e, _ := vmap.Get(k[j])
//fmt.Printf("Setting cell %s to value %v\n", cell, e)
xlsx.SetCellValue(sheetName, cell, e)
xlsx.SetCellValue(worksheetName, cell, e)
// Move to the next column
//asciiValue++
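
Saving the workbook is not shown in this diff; the unchanged code presumably writes it out with excelize's SaveAs, roughly along these lines (an assumption based on the outputFilename flag added above, not something visible in this commit):

if err = xlsx.SaveAs(outputFilename); err != nil {
    fmt.Printf("Error saving workbook '%s' : %s\n", outputFilename, err)
    os.Exit(1)
}
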

View File

@@ -1,108 +0,0 @@
package main
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"github.com/xuri/excelize/v2"
)
// Initial concept from https://stackoverflow.com/q/68621039
var end = errors.New("invalid end of array or object")
func main() {
file, err := ioutil.ReadFile("test.json")
if err != nil {
panic(err)
}
testkey, err := objectKeys([]byte(file))
fmt.Println(testkey, err)
return
var data interface{}
json.Unmarshal(file, &data) // reading all json data
//fmt.Println(data)
t, ok := data.([]interface{}) // assertion Interface
_ = ok
//fmt.Printf("%[1]v %[1]T\n", t) //map[string]interface {}
myMap := t[0] // aim here to get APP, Company and Category, which will be the column names of the Excel sheet
fmt.Printf("myMap: %v\n", myMap)
columnData, _ := myMap.(map[string]interface{}) // extract the underlying concrete data from interface
fmt.Printf("columnData: %v\n", columnData)
keys := make([]string, 0, len(columnData)) // create and initialize a slice to store the column names
for k := range columnData {
fmt.Printf("%[1]v %[1]T\n", k)
keys = append(keys, k)
}
return
xlsx := excelize.NewFile()
sheetName := "Sheet1"
xlsx.SetSheetName(xlsx.GetSheetName(1), sheetName)
c := 'A'
asciiValue := int(c)
var a string
for i := 0; i < len(keys); i++ {
a = string(asciiValue)
xlsx.SetCellValue(sheetName, a+"1", keys[i])
asciiValue++
}
err = xlsx.SaveAs("./Onkar.xlsx")
if err != nil {
fmt.Println(err)
return
}
fmt.Println("Excel file generated sucessfully")
}
func objectKeys(b []byte) ([]string, error) {
d := json.NewDecoder(bytes.NewReader(b))
t, err := d.Token()
if err != nil {
return nil, err
}
if t != json.Delim('{') {
return nil, errors.New("expected start of object")
}
var keys []string
for {
t, err := d.Token()
if err != nil {
return nil, err
}
if t == json.Delim('}') {
return keys, nil
}
keys = append(keys, t.(string))
if err := skipValue(d); err != nil {
return nil, err
}
}
}
func skipValue(d *json.Decoder) error {
t, err := d.Token()
if err != nil {
return err
}
switch t {
case json.Delim('['), json.Delim('{'):
for {
if err := skipValue(d); err != nil {
if err == end {
break
}
return err
}
}
case json.Delim(']'), json.Delim('}'):
return end
}
return nil
}
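
For context on the prototype being removed here: objectKeys walks only the top-level tokens of a JSON object, using skipValue to skip over nested arrays and objects, so it returns just the top-level key names. A small made-up example:

keys, err := objectKeys([]byte(`{"App":"x","Meta":{"a":1},"Tags":[1,2]}`))
// keys == []string{"App", "Meta", "Tags"}, err == nil
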

BIN
internal/.DS_Store vendored Normal file

Binary file not shown.

View File

@@ -1,3 +0,0 @@
module ordered
go 1.19

View File

@@ -1,267 +0,0 @@
// Package ordered provides an OrderedMap type for use in JSON handling.
// Although the JSON spec says the key order of an object should not matter,
// when working with third-party proprietary code that incorrectly relies on
// key order we have to maintain the object keys in the same order as the
// incoming JSON object; this package is useful for those cases.
//
// Disclaimer:
// like Go's default map (https://blog.golang.org/go-maps-in-action),
// this OrderedMap is not safe for concurrent use; if you need atomic access, use a sync.Mutex to synchronize.
package ordered
// References:
// JSON and Go https://blog.golang.org/json-and-go
// Go-Ordered-JSON https://github.com/virtuald/go-ordered-json
// Python OrderedDict https://github.com/python/cpython/blob/2.7/Lib/collections.py#L38
// port OrderedDict https://github.com/cevaris/ordered_map
import (
"bytes"
"container/list"
"encoding/json"
"fmt"
"io"
)
// KVPair is the key-value pair type, used for initializing from a list of key-value pairs and for iterating entries in insertion order
type KVPair struct {
Key string
Value interface{}
}
type m map[string]interface{}
// OrderedMap has similar operations to the default map, but maintains the
// insertion order of keys; as with map, all single-key operations (Get/Set/Delete) run in O(1).
type OrderedMap struct {
m
l *list.List
keys map[string]*list.Element // the doubly linked list elements, so delete and lookup are O(1)
}
// Create a new OrderedMap
func NewOrderedMap() *OrderedMap {
return &OrderedMap{
m: make(map[string]interface{}),
l: list.New(),
keys: make(map[string]*list.Element),
}
}
// Create a new OrderedMap and populate from a list of key-value pairs
func NewOrderedMapFromKVPairs(pairs []*KVPair) *OrderedMap {
om := NewOrderedMap()
for _, pair := range pairs {
om.Set(pair.Key, pair.Value)
}
return om
}
// return all keys
// func (om *OrderedMap) Keys() []string { return om.keys }
// Set sets the value for a particular key and remembers the order in which keys were inserted;
// if the key already exists, the order is not updated.
func (om *OrderedMap) Set(key string, value interface{}) {
if _, ok := om.m[key]; !ok {
om.keys[key] = om.l.PushBack(key)
}
om.m[key] = value
}
// Has checks whether a key exists
func (om *OrderedMap) Has(key string) bool {
_, ok := om.m[key]
return ok
}
// Get returns the value for a particular key, or nil if it does not exist; don't rely on nil to test existence, use Has or GetValue instead
func (om *OrderedMap) Get(key string) interface{} {
return om.m[key]
}
// GetValue returns the value along with whether the key exists
func (om *OrderedMap) GetValue(key string) (value interface{}, ok bool) {
value, ok = om.m[key]
return
}
// Delete deletes the element with the specified key (m[key]) from the map. If there is no such element, this is a no-op.
func (om *OrderedMap) Delete(key string) (value interface{}, ok bool) {
value, ok = om.m[key]
if ok {
om.l.Remove(om.keys[key])
delete(om.keys, key)
delete(om.m, key)
}
return
}
// EntriesIter returns an iterator over all key/value pairs, in the order the object was constructed
func (om *OrderedMap) EntriesIter() func() (*KVPair, bool) {
e := om.l.Front()
return func() (*KVPair, bool) {
if e != nil {
key := e.Value.(string)
e = e.Next()
return &KVPair{key, om.m[key]}, true
}
return nil, false
}
}
// EntriesReverseIter returns an iterator over all key/value pairs, in the reverse of the order the object was constructed
func (om *OrderedMap) EntriesReverseIter() func() (*KVPair, bool) {
e := om.l.Back()
return func() (*KVPair, bool) {
if e != nil {
key := e.Value.(string)
e = e.Prev()
return &KVPair{key, om.m[key]}, true
}
return nil, false
}
}
// MarshalJSON implements the json.Marshaler interface, so it is called by json.Marshal(om)
func (om *OrderedMap) MarshalJSON() (res []byte, err error) {
res = append(res, '{')
front, back := om.l.Front(), om.l.Back()
for e := front; e != nil; e = e.Next() {
k := e.Value.(string)
res = append(res, fmt.Sprintf("%q:", k)...)
var b []byte
b, err = json.Marshal(om.m[k])
if err != nil {
return
}
res = append(res, b...)
if e != back {
res = append(res, ',')
}
}
res = append(res, '}')
// fmt.Printf("marshalled: %v: %#v\n", res, res)
return
}
// UnmarshalJSON implements the json.Unmarshaler interface, so it is called by json.Unmarshal(data, om)
func (om *OrderedMap) UnmarshalJSON(data []byte) error {
dec := json.NewDecoder(bytes.NewReader(data))
dec.UseNumber()
// must open with a delim token '{'
t, err := dec.Token()
if err != nil {
return err
}
if delim, ok := t.(json.Delim); !ok || delim != '{' {
return fmt.Errorf("expect JSON object open with '{'")
}
err = om.parseobject(dec)
if err != nil {
return err
}
t, err = dec.Token()
if err != io.EOF {
return fmt.Errorf("expect end of JSON object but got more token: %T: %v or err: %v", t, t, err)
}
return nil
}
func (om *OrderedMap) parseobject(dec *json.Decoder) (err error) {
var t json.Token
for dec.More() {
t, err = dec.Token()
if err != nil {
return err
}
key, ok := t.(string)
if !ok {
return fmt.Errorf("expecting JSON key should be always a string: %T: %v", t, t)
}
t, err = dec.Token()
if err == io.EOF {
break
} else if err != nil {
return err
}
var value interface{}
value, err = handledelim(t, dec)
if err != nil {
return err
}
// om.keys = append(om.keys, key)
om.keys[key] = om.l.PushBack(key)
om.m[key] = value
}
t, err = dec.Token()
if err != nil {
return err
}
if delim, ok := t.(json.Delim); !ok || delim != '}' {
return fmt.Errorf("expect JSON object close with '}'")
}
return nil
}
func parsearray(dec *json.Decoder) (arr []interface{}, err error) {
var t json.Token
arr = make([]interface{}, 0)
for dec.More() {
t, err = dec.Token()
if err != nil {
return
}
var value interface{}
value, err = handledelim(t, dec)
if err != nil {
return
}
arr = append(arr, value)
}
t, err = dec.Token()
if err != nil {
return
}
if delim, ok := t.(json.Delim); !ok || delim != ']' {
err = fmt.Errorf("expect JSON array close with ']'")
return
}
return
}
func handledelim(t json.Token, dec *json.Decoder) (res interface{}, err error) {
if delim, ok := t.(json.Delim); ok {
switch delim {
case '{':
om2 := NewOrderedMap()
err = om2.parseobject(dec)
if err != nil {
return
}
return om2, nil
case '[':
var value []interface{}
value, err = parsearray(dec)
if err != nil {
return
}
return value, nil
default:
return nil, fmt.Errorf("Unexpected delimiter: %q", delim)
}
}
return t, nil
}
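
A short made-up round trip through this (now removed) package, showing that key order survives unmarshal, iteration, and marshal:

om := ordered.NewOrderedMap()
if err := json.Unmarshal([]byte(`{"b":1,"a":2}`), om); err != nil {
    log.Fatal(err)
}
iter := om.EntriesIter()
for pair, ok := iter(); ok; pair, ok = iter() {
    fmt.Println(pair.Key, pair.Value) // prints "b 1" then "a 2", preserving input order
}
out, _ := json.Marshal(om) // out == []byte(`{"b":1,"a":2}`)
_ = out
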

Binary file not shown.

1273
test.json

File diff suppressed because it is too large