CSV to JSON

Meutel 2017-07-09 10:03:55 +02:00
parent 6478ac9deb
commit 6003765d39
5 changed files with 273 additions and 0 deletions

.gitignore (vendored): 2 lines changed

@@ -41,3 +41,5 @@ shadirconcurrent/shadirconcurrent
pinger/pinger
datediff/datediff
example-json/example-json
financialjson/financialjson
statecsv2json/statecsv2json

financial.json (new file): 1 line

@@ -0,0 +1 @@
[{"Date":"2017-06-08T00:00:00Z","Open":982.349976,"High":984.570007,"Low":977.200012,"Close":983.409973,"Adj_Close":983.409973,"Volume":1481900},{"Date":"2017-06-09T00:00:00Z","Open":984.5,"High":984.5,"Low":935.630005,"Close":949.830017,"Adj_Close":949.830017,"Volume":3309400},{"Date":"2017-06-12T00:00:00Z","Open":939.559998,"High":949.35498,"Low":915.232971,"Close":942.900024,"Adj_Close":942.900024,"Volume":3763500},{"Date":"2017-06-13T00:00:00Z","Open":951.909973,"High":959.97998,"Low":944.090027,"Close":953.400024,"Adj_Close":953.400024,"Volume":2013300},{"Date":"2017-06-14T00:00:00Z","Open":959.919983,"High":961.150024,"Low":942.25,"Close":950.76001,"Adj_Close":950.76001,"Volume":1489700},{"Date":"2017-06-15T00:00:00Z","Open":933.969971,"High":943.338989,"Low":924.440002,"Close":942.309998,"Adj_Close":942.309998,"Volume":2133100},{"Date":"2017-06-16T00:00:00Z","Open":940,"High":942.039978,"Low":931.594971,"Close":939.780029,"Adj_Close":939.780029,"Volume":3094700},{"Date":"2017-06-19T00:00:00Z","Open":949.960022,"High":959.98999,"Low":949.049988,"Close":957.369995,"Adj_Close":957.369995,"Volume":1533300},{"Date":"2017-06-20T00:00:00Z","Open":957.52002,"High":961.619995,"Low":950.01001,"Close":950.630005,"Adj_Close":950.630005,"Volume":1126000},{"Date":"2017-06-21T00:00:00Z","Open":953.640015,"High":960.099976,"Low":950.76001,"Close":959.450012,"Adj_Close":959.450012,"Volume":1202200},{"Date":"2017-06-22T00:00:00Z","Open":958.700012,"High":960.719971,"Low":954.549988,"Close":957.090027,"Adj_Close":957.090027,"Volume":941400},{"Date":"2017-06-23T00:00:00Z","Open":956.830017,"High":966,"Low":954.200012,"Close":965.590027,"Adj_Close":965.590027,"Volume":1527900},{"Date":"2017-06-26T00:00:00Z","Open":969.900024,"High":973.309998,"Low":950.789978,"Close":952.27002,"Adj_Close":952.27002,"Volume":1598400},{"Date":"2017-06-27T00:00:00Z","Open":942.460022,"High":948.289978,"Low":926.849976,"Close":927.330017,"Adj_Close":927.330017,"Volume":2579900},{"Date":"2017-06-28T00:00:00Z","Open":929,"High":942.75,"Low":916,"Close":940.48999,"Adj_Close":940.48999,"Volume":2721400},{"Date":"2017-06-29T00:00:00Z","Open":929.919983,"High":931.26001,"Low":910.619995,"Close":917.789978,"Adj_Close":917.789978,"Volume":3299200},{"Date":"2017-06-30T00:00:00Z","Open":926.049988,"High":926.049988,"Low":908.309998,"Close":908.72998,"Adj_Close":908.72998,"Volume":2090200},{"Date":"2017-07-03T00:00:00Z","Open":912.179993,"High":913.940002,"Low":894.789978,"Close":898.700012,"Adj_Close":898.700012,"Volume":1709800},{"Date":"2017-07-05T00:00:00Z","Open":901.76001,"High":914.51001,"Low":898.5,"Close":911.710022,"Adj_Close":911.710022,"Volume":1813900},{"Date":"2017-07-06T00:00:00Z","Open":904.119995,"High":914.94397,"Low":899.700012,"Close":906.690002,"Adj_Close":906.690002,"Volume":1415200},{"Date":"2017-07-07T00:00:00Z","Open":908.849976,"High":921.539978,"Low":908.849976,"Close":918.590027,"Adj_Close":918.590027,"Volume":1637785}]

financialjson/main.go (new file): 112 lines

@@ -0,0 +1,112 @@
package main

import "encoding/csv"
import "encoding/json"
import "time"
import "io"
import "log"
import "os"
import "strconv"

// COLUMNS holds the CSV header names, filled from the first line read.
var COLUMNS []string

type FinancialData struct {
    Date                                      time.Time
    Open, High, Low, Close, Adj_Close, Volume float64
}

// readCsvLine maps one CSV record onto a FinancialData value, using the
// header names stored in COLUMNS to decide which field each column fills.
func readCsvLine(line []string) *FinancialData {
    data := new(FinancialData)
    for i, col := range line {
        switch COLUMNS[i] {
        case "Date":
            d, err := time.Parse("2006-01-02", col)
            if err != nil {
                panic(err)
            } else {
                data.Date = d
            }
        case "Open":
            data.Open = toFloat(col)
        case "High":
            data.High = toFloat(col)
        case "Low":
            data.Low = toFloat(col)
        case "Close":
            data.Close = toFloat(col)
        case "Adj Close":
            data.Adj_Close = toFloat(col)
        case "Volume":
            data.Volume = toFloat(col)
        }
    }
    return data
}

// toFloat parses a decimal string, aborting the program on invalid input.
func toFloat(v string) float64 {
    f, err := strconv.ParseFloat(v, 64)
    if err != nil {
        log.Fatalln(err)
    }
    return f
}

// readCsvHeader records the column names found on the first CSV line.
func readCsvHeader(line []string) {
    for _, col := range line {
        COLUMNS = append(COLUMNS, col)
    }
}

// readCsv reads every record from the input: the first line is treated as
// the header, every following line as a data row.
func readCsv(in io.Reader) []FinancialData {
    data := []FinancialData{}
    csvReader := csv.NewReader(in)
    for {
        line, err := csvReader.Read()
        if err == io.EOF {
            break
        } else if err != nil {
            log.Fatal(err)
        }
        if len(COLUMNS) == 0 {
            readCsvHeader(line)
        } else {
            data = append(data, *readCsvLine(line))
        }
    }
    return data
}

func main() {
    if len(os.Args) < 2 {
        log.Fatalln("Usage: financialjson <file> [output]")
    }
    // read input
    csv, err := os.Open(os.Args[1])
    if err != nil {
        log.Fatalln("Error reading file", err)
    }
    defer csv.Close()
    // parse data
    data := readCsv(csv)
    // convert to JSON: write to the optional output file, otherwise stdout
    var writer io.Writer
    if len(os.Args) > 2 {
        f, ferr := os.Create(os.Args[2])
        if ferr != nil {
            panic(ferr)
        }
        defer f.Close()
        writer = f
    } else {
        writer = os.Stdout
    }
    err = json.NewEncoder(writer).Encode(data)
    if err != nil {
        panic(err)
    }
}
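
For reference, a minimal run of this converter might look like the sketch below. The input header matches the column names readCsvLine expects (dates in YYYY-MM-DD form, per the time.Parse layout "2006-01-02", plus Open, High, Low, Close, Adj Close and Volume); the sample row echoes the first record of financial.json above, while the file names and the build command are illustrative, not part of this commit.

Date,Open,High,Low,Close,Adj Close,Volume
2017-06-08,982.349976,984.570007,977.200012,983.409973,983.409973,1481900

go build ./financialjson && ./financialjson quotes.csv financial.json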

state_table.json (new file): 1 line

File diff suppressed because one or more lines are too long

statecsv2json/main.go (new file): 157 lines

@@ -0,0 +1,157 @@
package main

import "encoding/csv"
import "encoding/json"
import "io"
import "log"
import "os"
import "strconv"

type StateData struct {
    Id                      int
    Name                    string
    Abbreviation            string
    Country                 string
    Type_state              string
    Sort                    int
    Status                  string
    Occupied                string
    Notes                   string
    Fips_state              string
    Assoc_press             string
    Standard_federal_region string
    Census_region           int
    Census_region_name      string
    Census_division         int
    Census_division_name    string
    Circuit_court           int
}

// readCsvHeader maps each column name to its position in the header line.
func readCsvHeader(line []string) (map[string]int, error) {
    columns := make(map[string]int)
    for i, col := range line {
        columns[col] = i
    }
    return columns, nil
}

// readCsvLine converts one CSV record into a StateData value, matching each
// column index against the positions recorded from the header.
func readCsvLine(line []string, columns map[string]int) (*StateData, error) {
    var err error
    data := new(StateData)
    for i, col := range line {
        switch i {
        case columns["id"]:
            data.Id, err = strconv.Atoi(col)
            if err != nil {
                return data, err
            }
        case columns["name"]:
            data.Name = col
        case columns["abbreviation"]:
            data.Abbreviation = col
        case columns["country"]:
            data.Country = col
        case columns["type"]:
            data.Type_state = col
        case columns["sort"]:
            data.Sort, err = strconv.Atoi(col)
            if err != nil {
                return data, err
            }
        case columns["status"]:
            data.Status = col
        case columns["occupied"]:
            data.Occupied = col
        case columns["notes"]:
            data.Notes = col
        case columns["fips_state"]:
            data.Fips_state = col
        case columns["assoc_press"]:
            data.Assoc_press = col
        case columns["standard_federal_region"]:
            data.Standard_federal_region = col
        case columns["census_region"]:
            data.Census_region, err = strconv.Atoi(col)
            if err != nil {
                return data, err
            }
        case columns["census_region_name"]:
            data.Census_region_name = col
        case columns["census_division"]:
            data.Census_division, err = strconv.Atoi(col)
            if err != nil {
                return data, err
            }
        case columns["census_division_name"]:
            data.Census_division_name = col
        case columns["circuit_court"]:
            data.Circuit_court, err = strconv.Atoi(col)
            if err != nil {
                return data, err
            }
        }
    }
    return data, nil
}

// readCsv reads the whole input and indexes the parsed states by their
// abbreviation; invalid lines are logged and skipped.
func readCsv(in io.Reader) map[string]*StateData {
    data := make(map[string]*StateData)
    csvReader := csv.NewReader(in)
    var header map[string]int
    for {
        line, err := csvReader.Read()
        if err == io.EOF {
            break
        } else if err != nil {
            log.Fatal(err)
        }
        if len(header) == 0 {
            header, err = readCsvHeader(line)
            if err != nil {
                log.Fatal(err)
            }
        } else {
            d, err := readCsvLine(line, header)
            if err != nil {
                log.Println("Invalid line", err) // skip
            } else {
                data[d.Abbreviation] = d
            }
        }
    }
    return data
}

func main() {
    if len(os.Args) < 2 {
        log.Fatalln("Usage: statecsv2json <file> [output]")
    }
    // read input
    csv, err := os.Open(os.Args[1])
    if err != nil {
        log.Fatalln("Error reading file", err)
    }
    defer csv.Close()
    // parse data
    data := readCsv(csv)
    // convert to JSON: write to the optional output file, otherwise stdout
    var writer io.Writer
    if len(os.Args) > 2 {
        f, ferr := os.Create(os.Args[2])
        if ferr != nil {
            panic(ferr)
        }
        defer f.Close()
        writer = f
    } else {
        writer = os.Stdout
    }
    err = json.NewEncoder(writer).Encode(data)
    if err != nil {
        panic(err)
    }
}
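
Similarly, a sketch of the state converter's expected input, using the lower-case column names readCsvLine looks up in the header map; the sample row, file names, and build command are illustrative only (the actual state_table.json diff is suppressed above).

id,name,abbreviation,country,type,sort,status,occupied,notes,fips_state,assoc_press,standard_federal_region,census_region,census_region_name,census_division,census_division_name,circuit_court
1,Alabama,AL,USA,state,10,current,,,01,Ala.,IV,3,South,6,East South Central,11

go build ./statecsv2json && ./statecsv2json state_table.csv state_table.json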