package main

import (
	"encoding/json"
	"flag"
	"fmt"
	"io"
	"log"
	"os"
	"reflect"
	"runtime"
	"slices"
	"sort"
	"strconv"
	"time"
	"unicode/utf8"

	"github.com/xuri/excelize/v2"
	"golang.org/x/text/encoding/unicode"
	"golang.org/x/text/transform"
)

type Input struct {
	Tracking struct {
		AvgCpu struct {
			Tin    Record `json:"Tin"`
			Bronze Record `json:"Bronze"`
			Silver Record `json:"Silver"`
			Gold   Record `json:"Gold"`
		} `json:"AVG vCPUs"`
		AvgRam     Record `json:"Avg RAM (GB)"`
		ProVmCount Record `json:"ProRated VM Count"`
		VmCount    Record `json:"VM Name"`
	} `json:"Tracking"`
}

type Record struct {
	Ryde json.RawMessage `json:"Ryde Computer Centre"`
	Wsdc json.RawMessage `json:"Western Sydney Data Centre"`
}
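
// The input JSON is expected to mirror the struct tags above. The dates and
// numbers below are illustrative only; date keys must use the "January-2006"
// layout understood by parseDate further down. A minimal example:
//
//	{
//	  "Tracking": {
//	    "AVG vCPUs": {
//	      "Tin": {
//	        "Ryde Computer Centre":       {"January-2024": 12.5, "February-2024": 13.1},
//	        "Western Sydney Data Centre": {"January-2024": 9.0,  "February-2024": 9.4}
//	      },
//	      "Bronze": { ... }, "Silver": { ... }, "Gold": { ... }
//	    },
//	    "Avg RAM (GB)":      {"Ryde Computer Centre": {"January-2024": 256}, "Western Sydney Data Centre": {"January-2024": 128}},
//	    "ProRated VM Count": {"Ryde Computer Centre": {"January-2024": 120}, "Western Sydney Data Centre": {"January-2024": 80}},
//	    "VM Name":           {"Ryde Computer Centre": {"January-2024": 130}, "Western Sydney Data Centre": {"January-2024": 90}}
//	  }
//	}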

type KeyValue struct {
	Key   string
	Value interface{}
}

var sha1ver string   // sha1 revision used to build the program
var buildTime string // when the executable was built
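
// sha1ver and buildTime are intended to be injected by the linker at build
// time; the exact command is not part of this file, but a typical invocation
// would look something like:
//
//	go build -ldflags "-X main.sha1ver=$(git rev-parse HEAD) -X 'main.buildTime=$(date -u +%Y-%m-%dT%H:%M:%SZ)'"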

// Add a new key-value pair to the slice
func addData(data []KeyValue, key string, value interface{}) []KeyValue {
	return append(data, KeyValue{Key: key, Value: value})
}

// Find a value by key
func findData(data []KeyValue, key string) (interface{}, bool) {
	for _, kv := range data {
		if kv.Key == key {
			return kv.Value, true
		}
	}
	return nil, false // Return false if key not found
}

// GenerateAvgVcpusCharts builds one worksheet and line chart per data centre
// (RCC and WSDC) from the "AVG vCPUs" tracking data.
func GenerateAvgVcpusCharts(f *excelize.File, data any, name string) {
	var err error
	rydeAvgCpu := []KeyValue{}
	wsdcAvgCpu := []KeyValue{}
	var avgCpuColumns []string

	// Access AvgCpu using reflection
	avgCpuVal := reflect.ValueOf(data)
	avgCpuType := reflect.TypeOf(data)

	// Iterate over each field in AvgCpu
	for i := 0; i < avgCpuVal.NumField(); i++ {
		field := avgCpuVal.Field(i)
		fieldName := avgCpuType.Field(i).Name

		// Create variables to hold unmarshaled data
		var rydeData interface{}
		var wsdcData interface{}

		// Unmarshal Ryde and Wsdc fields into interface{} if they contain valid JSON
		ryde := field.FieldByName("Ryde")
		wsdc := field.FieldByName("Wsdc")

		if len(ryde.Interface().(json.RawMessage)) > 0 {
			err = json.Unmarshal(ryde.Interface().(json.RawMessage), &rydeData)
			if err != nil {
				fmt.Printf("Error unmarshaling Ryde for %s: %v\n", fieldName, err)
				return
			}
		}

		if len(wsdc.Interface().(json.RawMessage)) > 0 {
			err = json.Unmarshal(wsdc.Interface().(json.RawMessage), &wsdcData)
			if err != nil {
				fmt.Printf("Error unmarshaling Wsdc for %s: %v\n", fieldName, err)
				return
			}
		}

		rydeAvgCpu = addData(rydeAvgCpu, fieldName, rydeData)
		wsdcAvgCpu = addData(wsdcAvgCpu, fieldName, wsdcData)
		//rydeAvgCpu[fieldName] = rydeData
		//wsdcAvgCpu[fieldName] = wsdcData

		avgCpuColumns = append(avgCpuColumns, fieldName)
	}

	// Generate RCC worksheet and graph
	AvgChart(f, name+" RCC", "A1", avgCpuColumns, rydeAvgCpu, 0, 0)
	// Generate WSDC worksheet and graph
	AvgChart(f, name+" WSDC", "N1", avgCpuColumns, wsdcAvgCpu, 0, 0)
}

// GenerateCharts creates a worksheet named after the metric and adds a line
// chart comparing the RCC and WSDC values over time.
func GenerateCharts(f *excelize.File, data any, name string, location string, yMinValue float64, yMaxValue float64) {
	var err error
	//parsedData := make(map[string]interface{})
	parsedData := []KeyValue{}
	var dataColumns = []string{"RCC", "WSDC"}

	// Create interfaces to hold unmarshaled data
	var rcc interface{}
	var wsdc interface{}

	// Access specific json objects using reflection
	values := reflect.ValueOf(data)
	rccField := values.FieldByName("Ryde")
	wsdcField := values.FieldByName("Wsdc")

	// Unmarshal raw json into interfaces
	if len(rccField.Interface().(json.RawMessage)) > 0 {
		err = json.Unmarshal(rccField.Interface().(json.RawMessage), &rcc)
		if err != nil {
			fmt.Printf("Error unmarshaling Ryde: %v\n", err)
			return
		}
	}
	if len(wsdcField.Interface().(json.RawMessage)) > 0 {
		err = json.Unmarshal(wsdcField.Interface().(json.RawMessage), &wsdc)
		if err != nil {
			fmt.Printf("Error unmarshaling Wsdc: %v\n", err)
			return
		}
	}

	// Store the data together
	parsedData = addData(parsedData, "RCC", rcc)
	parsedData = addData(parsedData, "WSDC", wsdc)
	//parsedData["RCC"] = rcc
	//parsedData["WSDC"] = wsdc
	//prettyPrint(parsedData)

	// Generate worksheet and graph
	AvgChart(f, name, location, dataColumns, parsedData, yMinValue, yMaxValue)
}

// AvgChart writes the supplied data to a new worksheet and adds a line chart
// for it to the "Report" sheet at the given location.
func AvgChart(f *excelize.File, worksheetName string, location string, avgCpuColumns []string, data []KeyValue,
	yMinValue float64, yMaxValue float64) {
	var err error
	var chartSeries []excelize.ChartSeries
	var dataDates []string
	var col int
	var row int

	fmt.Printf("Creating worksheet %s\n", worksheetName)
	_, err = f.NewSheet(worksheetName)
	if err != nil {
		log.Fatal(err)
	}

	// Create column headers dynamically (assumes fewer than 26 data columns)
	f.SetCellValue(worksheetName, "A1", "Date")
	for i := 0; i < len(avgCpuColumns); i++ {
		cell := string(rune('A'+i+1)) + "1" // B1, C1, D1, etc.
		f.SetCellValue(worksheetName, cell, avgCpuColumns[i])
	}

	// Get the values for the dates column using the first object in data;
	// all entries are assumed to share the same date keys
	for _, v := range data {
		if dateMap, ok := v.Value.(map[string]interface{}); ok {
			for date := range dateMap {
				dataDates = append(dataDates, date)
			}
		}
		break // only the dates from the first entry are needed
	}

	// Sort dates using the custom function
	err = sortDates(dataDates)
	if err != nil {
		fmt.Printf("Failed to sort dates for worksheet %s: %s\n", worksheetName, err)
		return
	}

	// Set the values for the dates column
	for i := 0; i < len(dataDates); i++ {
		cell := string(rune('A')) + strconv.Itoa(i+2) // A2, A3 etc
		f.SetCellValue(worksheetName, cell, dataDates[i])
	}

	// Iterate over each KeyValue in the data slice (resource pool types)
	for _, poolKv := range data {
		pool := poolKv.Key // The pool name

		// Find the column that matches, add one to account for the date column
		col = slices.Index(avgCpuColumns, pool) + 1
		//fmt.Printf("Pool: %s, column: %d\n", pool, col)

		// Type assertion to confirm that poolKv.Value is a map[string]interface{}
		if dateMap, ok := poolKv.Value.(map[string]interface{}); ok {
			for date, val := range dateMap {
				//fmt.Printf("Date: %s, Value: %v\n", date, val)

				// Find the correct row, add one to account for sheet heading
				row = slices.Index(dataDates, date) + 1

				cell := string(rune('A'+col)) + strconv.Itoa(row+1)
				//fmt.Printf("Adding value %f (%s) to %s\n", val, date, cell)
				f.SetCellValue(worksheetName, cell, val)
			}
		}
		// Create the chart series for this resource pool
		thisChartSeries := excelize.ChartSeries{
			Name:       "'" + worksheetName + "'!$" + string(rune('A'+col)) + "$1",                                                                  // Reference the cell containing the resource pool name, eg Tin in $B$1
			Categories: "'" + worksheetName + "'!$A$2:$A$" + strconv.Itoa(len(dataDates)+1),                                                          // Reference the dates in the first column eg $A$2:$A$5
			Values:     "'" + worksheetName + "'!$" + string(rune('A'+col)) + "$2:$" + string(rune('A'+col)) + "$" + strconv.Itoa(len(dataDates)+1), // Reference the values in the column matching the resource pool name, eg Tin in $B$2:$B$5
			Line: excelize.ChartLine{
				Smooth: true,
			},
		}
		//prettyPrint(thisChartSeries)
		chartSeries = append(chartSeries, thisChartSeries)
	}

	chart := excelize.Chart{
		Type:   excelize.Line,
		Series: chartSeries,
		Format: excelize.GraphicOptions{
			OffsetX: 5,
			OffsetY: 5,
		},
		Legend: excelize.ChartLegend{
			Position: "right",
		},
		Title: []excelize.RichTextRun{
			{
				Text: worksheetName,
			},
		},
		PlotArea: excelize.ChartPlotArea{
			ShowCatName:     false,
			ShowLeaderLines: false,
			ShowPercent:     true,
			ShowSerName:     false,
			ShowVal:         false,
		},
		ShowBlanksAs: "zero",
		XAxis: excelize.ChartAxis{
			MajorGridLines: true,
			MinorGridLines: true,
			Title: []excelize.RichTextRun{
				{
					Text: "Month Year",
				},
			},
			Font: excelize.Font{
				Color: "000000",
			},
		},
		YAxis: excelize.ChartAxis{
			MajorGridLines: true,
			MinorGridLines: true,
			Title: []excelize.RichTextRun{
				{
					Text: "Count/Size",
				},
			},
			Font: excelize.Font{
				Color: "000000",
			},
		},
		Dimension: excelize.ChartDimension{
			Height: 500,
			Width:  800,
		},
	}

	if yMaxValue > 0 || yMinValue > 0 {
		chart.YAxis.Maximum = &yMaxValue
		chart.YAxis.Minimum = &yMinValue
	}

	if err := f.AddChart("Report", location, &chart); err != nil {
		fmt.Printf("Error adding chart to workbook %s at location %s: %s\n", worksheetName, location, err)
		return
	}
}

func main() {
	var err error
	var data Input

	inputFile := flag.String("input", "input.json", "The filename from which to load historical data")
	outputFile := flag.String("output", "book1.xlsx", "The filename to use when writing the excel workbook")
	flag.Parse()
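
	// Example invocation (the binary name is illustrative):
	//
	//	./chart-generator -input input.json -output book1.xlsx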

	fmt.Println("Excel chart generation utility, written by Nathan Coad (nathan.coad@dell.com)")
	fmt.Printf("Built on %s from sha1 %s\n", buildTime, sha1ver)

	// Create the workbook
	f := excelize.NewFile()
	defer func() {
		if err := f.Close(); err != nil {
			fmt.Printf("Error closing excel sheet: %s\n", err)
			os.Exit(1)
		}
	}()
	err = f.SetSheetName("Sheet1", "Report")
	if err != nil {
		fmt.Printf("Error renaming Sheet1: %s\n", err)
		os.Exit(1)
	}

	// Load the JSON data from file
	file, err := os.Open(*inputFile)
	if err != nil {
		fmt.Printf("Failed to open %s: %v\n", *inputFile, err)
		os.Exit(1)
	}
	defer file.Close()

	byteValue, err := io.ReadAll(file)
	if err != nil {
		fmt.Printf("Failed to read input json: %v\n", err)
		os.Exit(1)
	}

	// Detect encoding and convert to UTF-8 if necessary
	utf8Data, err := ensureUTF8(byteValue)
	if err != nil {
		fmt.Printf("Error ensuring UTF-8 encoding: %v\n", err)
		os.Exit(1)
	}

	if err := json.Unmarshal(utf8Data, &data); err != nil {
		fmt.Printf("Error reading json input: %s\n", err)
		os.Exit(1)
	}

	// Generate charts into workbook
	GenerateAvgVcpusCharts(f, data.Tracking.AvgCpu, "Average vCPUs")
	GenerateCharts(f, data.Tracking.AvgRam, "Average RAM(GB)", "A30", 0, 0)
	GenerateCharts(f, data.Tracking.ProVmCount, "ProRated VM Count", "A60", 0, 10000)
	GenerateCharts(f, data.Tracking.VmCount, "VM Count", "N60", 0, 10000)

	// Save workbook, then let main return so the deferred Close runs
	if err := f.SaveAs(*outputFile); err != nil {
		fmt.Printf("Error saving excel workbook: %s\n", err)
		os.Exit(1)
	}
}

// parseDate parses a date string like "January-2006" into a time.Time object
func parseDate(dateStr string) (time.Time, error) {
	layout := "January-2006"
	return time.Parse(layout, dateStr)
}

// sortDates sorts a slice of date strings in the format "Month-Year"
func sortDates(dates []string) error {
	// Custom sort logic
	sort.Slice(dates, func(i, j int) bool {
		date1, err1 := parseDate(dates[i])
		date2, err2 := parseDate(dates[j])

		if err1 != nil || err2 != nil {
			fmt.Println("Error parsing dates:", err1, err2)
			return false
		}

		// Compare parsed dates
		return date1.Before(date2)
	})
	return nil
}

// ensureUTF8 checks if the data is UTF-8, and if not, converts it to UTF-8.
// It also removes the BOM from UTF-8 data if present.
func ensureUTF8(data []byte) ([]byte, error) {
	// Detect and strip UTF-8 BOM if present
	if hasUTF8BOM(data) {
		data = stripUTF8BOM(data)
	}

	// If data is already UTF-8 (without BOM), return as is
	if isUTF8(data) {
		return data, nil
	}

	// Otherwise assume UTF-16 and decode it; with UseBOM the decoder honours a
	// UTF-16 BOM (LE or BE) and falls back to little-endian when none is present
	decoder := unicode.UTF16(unicode.LittleEndian, unicode.UseBOM).NewDecoder()
	utf8Data, _, err := transform.Bytes(decoder, data)
	if err != nil {
		return nil, fmt.Errorf("error converting to UTF-8: %v", err)
	}

	return utf8Data, nil
}

// isUTF8 reports whether the byte slice is valid UTF-8
func isUTF8(data []byte) bool {
	return utf8.Valid(data)
}

// hasUTF8BOM checks if the data has a UTF-8 BOM
func hasUTF8BOM(data []byte) bool {
	return len(data) >= 3 && data[0] == 0xEF && data[1] == 0xBB && data[2] == 0xBF
}

// stripUTF8BOM removes the UTF-8 BOM if it exists
func stripUTF8BOM(data []byte) []byte {
	return data[3:]
}

// fetchValue recursively prints the dynamic type of an unmarshaled JSON value
func fetchValue(value interface{}) {
	switch value.(type) {
	case string:
		fmt.Printf("%v is a string\n", value)
	case bool:
		fmt.Printf("%v is bool\n", value)
	case float64:
		fmt.Printf("%v is float64\n", value)
	case []interface{}:
		fmt.Printf("%v is a slice of interface\n", value)
		for _, v := range value.([]interface{}) { // use type assertion to loop over []interface{}
			fetchValue(v)
		}
	case map[string]interface{}:
		fmt.Printf("%v is a map\n", value)
		for _, v := range value.(map[string]interface{}) { // use type assertion to loop over map[string]interface{}
			fetchValue(v)
		}
	default:
		fmt.Printf("%v is unknown\n", value)
	}
}

// prettyPrint comes from https://gist.github.com/sfate/9d45f6c5405dc4c9bf63bf95fe6d1a7c
func prettyPrint(args ...interface{}) {
	var caller string

	timeNow := time.Now().Format("01-02-2006 15:04:05")
	prefix := fmt.Sprintf("[%s] %s -- ", "PrettyPrint", timeNow)
	_, fileName, fileLine, ok := runtime.Caller(1)

	if ok {
		caller = fmt.Sprintf("%s:%d", fileName, fileLine)
	} else {
		caller = ""
	}

	fmt.Printf("\n%s%s\n", prefix, caller)

	if len(args) == 2 {
		label := args[0]
		value := args[1]

		s, _ := json.MarshalIndent(value, "", "\t")
		fmt.Printf("%s%s: %s\n", prefix, label, string(s))
	} else {
		s, _ := json.MarshalIndent(args, "", "\t")
		fmt.Printf("%s%s\n", prefix, string(s))
	}
}