Skip to content

Commit

Permalink
start
Browse files Browse the repository at this point in the history
  • Loading branch information
ohiTuna committed Apr 25, 2018
0 parents commit 8a4c3d1
Showing 1 changed file with 94 additions and 0 deletions.
94 changes: 94 additions & 0 deletions samhsa.go
Original file line number Diff line number Diff line change
@@ -0,0 1,94 @@
package main

import (
	"fmt"
	"log"
	"net/http"
	"strings"
	"time"

	"github.com/PuerkitoBio/goquery"
)


// main fetches the SAMHSA certified-physicians listing and prints the
// certification year found in each result row.
func main() {
	// Use an explicit timeout so a hung server cannot stall the
	// program forever (the default http.Client has no timeout).
	client := &http.Client{Timeout: 30 * time.Second}
	resp, err := client.Get("http://www.samhsa.gov/medication-assisted-treatment/physician-program-data/certified-physicians?field_bup_us_state_code_value=All&page=1")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		log.Fatalf("unexpected status fetching physician data: %s", resp.Status)
	}

	// goquery.NewDocument is deprecated; parse the response body directly.
	doc, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		log.Fatal(err)
	}

	// Each row's certification year lives in a .field-content element
	// inside the views-field-field-bup-year-certified cell. The original
	// selector lacked the leading dots, so it matched element *names*
	// (which don't exist in the page) instead of classes and never
	// selected anything.
	doc.Find(".views-field-field-bup-year-certified .field-content").Each(func(i int, s *goquery.Selection) {
		// The previous code called s.Find("Year:"), but Find expects a
		// CSS selector, not label text — it always returned an empty
		// selection. The cell's own text is the year value we want.
		year := strings.TrimSpace(s.Text())
		fmt.Printf("year %d: %v\n", i, year)
	})
}
# DATA2000
# DATA2000

0 comments on commit 8a4c3d1

Please sign in to comment.