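# catalogue.lil (hypothetical file name): a tiny bookmark catalogue.
# given a url, it fetches the page's <title> via wget and appends a
# (title reference created_date) row to a csv; given -l, it lists all rows.
# a sketch of the intended invocation, assuming the lilt command-line
# interpreter (which provides shell[], args, read[], and write[]):
#   lilt catalogue.lil https://example.com   # add an entry
#   lilt catalogue.lil -l                    # list every entry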
csv_db:"test.csv"
on load_db db do
  # read the raw csv text and parse it into a table of three string columns
  data:read[db]
  out:readcsv[data "sss"]
end
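# a hypothetical sketch of the backing csv's shape: three string columns
# (hence the "sss" format) matching the fields inserted by entry[] below:
#   Example Domain,https://example.com,2024-01-15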
on save_db t db do
  # serialise the table back to csv text and overwrite the file
  out:writecsv[t]
  write[db out]
end
on entry d t do
  # where d is a table representing a new row to be added to t, an existing table
  date_cmd:"date +%Y-%m-%d | tr -d '\n'"
  wget_cmd:"wget -qO- " , d.url , " | grep -o \"<title>[^<]*\" | sed -e 's/<[^>]*>//g' | tr -d '\n'"
  # run the wget pipeline once, keeping the whole result so both .exit and .out are available
  wget_res:shell[wget_cmd]
  if wget_res.exit = -1 do
    exit["\n wget failed\n"]
  else
    t:insert title:(wget_res.out) reference:(d.url) created_date:(shell[date_cmd].out) into t
  end
end
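# for a hypothetical d.url of "https://example.com", wget_cmd above expands to:
#   wget -qO- https://example.com | grep -o "<title>[^<]*" | sed -e 's/<[^>]*>//g' | tr -d '\n'
# i.e. fetch the page quietly to stdout, keep the text of its <title> tag,
# strip the tag itself, and delete the trailing newline.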
if args[2] do
  # read in the csv, and store it in memory as a table
  catalogue:load_db[csv_db]
  if args[2] ~ "-l" do
    show[catalogue] # listing everything is probably not ideal once you have thousands of entries...
  else
    url:args[2]
    # build a one-row table holding the new url for entry[] to consume
    row.url:url
    row:table row
    catalogue:entry[row catalogue]
    # write the updated table back out to the csv
    save_db[catalogue csv_db]
  end
else
  print["\n please pass a url (or -l to list entries) as an argument\n"]
end