Code.gs
/********* SET YOUR GLOBAL PARAMETERS HERE ********/
//replace these with your own values
const SUPABASE_PROJECT = 'https://YOURPROJECTID.supabase.co'
const SUPABASE_API_KEY = 'YOURANONORSERVICEROLEAPIKEY'
const ID_SHEET = 'YOURSHEETID'
const NAME_TABLE = 'YOURTABLEORVIEW'
const WHERE_CONDITION = 'field=eq.fieldvalue'//check out the where condition in supabase API docs : https://supabase.com/docs/guides/api
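//illustrative filters (not from the original): e.g. 'status=eq.active' or 'id=gt.100'; pass '' to pull every row, as localTest() does below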
const BATCH_SIZE = 2500 //Set this to a number SMALLER than your supabase max row limits (https://data-addict.jadynekena.com/pull-datas-from-supabase-to-google-sheets/#:~:text=Your supabase max rows limit, which can be changed here)
/********* END OF YOUR GLOBAL PARAMETERS ********/
/********** OTHER VARIABLES YOU DON'T HAVE TO CHANGE ***********/
const TARGET_SHEET = SpreadsheetApp.openById(ID_SHEET)
const TARGET_SHEET_TAB = TARGET_SHEET.getSheets()[0] //this pulls data into the first tab of your sheet. You can also use: TARGET_SHEET.getSheetByName('nameofyoursheettab')
const SUPABASE_URL = SUPABASE_PROJECT + '/rest/v1/'
/********** END OF OTHER VARIABLES YOU DON'T HAVE TO CHANGE ***********/
//run this manually first before creating your trigger
function main() {
const datas = get_datas(NAME_TABLE, WHERE_CONDITION)
insert_datas(TARGET_SHEET_TAB,datas)
}
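//OPTIONAL: a minimal sketch of the trigger mentioned above (not part of the original script).
//The function name createDailyTrigger and the daily schedule are only example choices; adjust them to your needs.
function createDailyTrigger() {
  ScriptApp.newTrigger('main')
    .timeBased()
    .everyDays(1)
    .create()
}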
/***************************************************** USEFUL FUNCTIONS DOWN BELOW **************************************************/
//depending on the where condition of your query, we use '?' or '&' in the fetched URL
function apikey(symbol){
return symbol + 'apikey=' + SUPABASE_API_KEY
}
function localTest(){
const res = get_datas(NAME_TABLE,'')
console.log(res)
}
function get_datas(name_table,where_condition){
var finalDatas = []
//fetch a single row first to learn the exact number of records
const content_range = get_datas_chunk(name_table,where_condition,0,1,true)
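//content_range looks like '0-0/29131'; the total record count is the part after the '/'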
const number_of_records = content_range.split('/')[1]
console.log({number_of_records})
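//then pull the rows in chunks of BATCH_SIZE, paginating with offset/limit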
for(var optionalOffset = 0 ; optionalOffset <= number_of_records ; optionalOffset = optionalOffset + BATCH_SIZE ){
console.log('\n\n\n\noptionalOffset', optionalOffset + ' / ' + number_of_records)
const temp = get_datas_chunk(name_table,where_condition,optionalOffset,BATCH_SIZE)
const sizeCharLog = 500
console.log(' ' + sizeCharLog + ' last characters = ', temp.slice(temp.length - sizeCharLog))
const tempJSONarray = JSON.parse(temp)
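//append this chunk's rows to the accumulator (push.apply spreads the parsed array into individual elements)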
finalDatas.push.apply(finalDatas, tempJSONarray);
console.log(' current retrieved data size = ' + finalDatas.length)
}
return JSON.stringify(finalDatas)
}
function get_datas_chunk(name_table,where_condition,optionalOffset,optionalBatchsize,content_range_mode) {
var symbol = where_condition ? '&' : '?'
var url = SUPABASE_URL + name_table + (where_condition ? "?" + where_condition : '') + apikey(symbol)
url += optionalOffset ? '&offset=' + optionalOffset : ""
url += optionalBatchsize ? '&limit=' + optionalBatchsize :
BATCH_SIZE ? '&limit=' + BATCH_SIZE : ""
//check in the logger if the url is ok
console.log({url})
//fetching with the right headers
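//'Prefer: count=exact' asks the Supabase REST API (PostgREST) to report the total row count in the Content-Range response header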
response = UrlFetchApp.fetch(url, {
headers : content_range_mode ? {Prefer: 'count=exact'} : {}
})
const content_range = response.getAllHeaders()['Content-Range'] //'Content-Range': '0-0/29131',
//uncomment this if you want to monitor your datas
/*
console.log({content_range})
console.log({response})
console.log(response.getAllHeaders())
*/
return content_range_mode ? content_range : response.getContentText();
}
async function insert_async(sh, all_rows){
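//writes all rows in a single setValues() call starting at cell A1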
sh.getRange(1,1,all_rows.length,all_rows[0].length).setValues(all_rows)
return all_rows
}
function insert_datas(sh,jsondatas){
//always clear previous versions
sh.clear()
var all_rows = [[]]
if(JSON.parse(jsondatas).length > 0){
all_rows = json2arrays(jsondatas)
insert_async(sh, all_rows) //async wrapper; since it contains no await, setValues() still completes before the log below
}
console.log('INSERTED ' + all_rows.length + ' ROWS WITH ' + all_rows[0].length + ' COLUMNS.')
return all_rows;
}
function json2arrays(jsoncontent){
var json_list = JSON.parse(jsoncontent)
var array = [];
var headers = []
var firstline = json_list[0]
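//use the first record's keys as the header row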
if(firstline){
headers = Object.keys(firstline)
array.push(headers)
}
for(var i = 0; i < json_list.length; i++) {
var obj = json_list[i];
var temp = []
Object.keys(obj).map((key) => temp.push(obj[key]))
array.push(temp);
}
return array
}