csv_db:"test.csv" on load_db db do data:read[db] out:readcsv[data "sss"] end on save_db t db do out:writecsv[t] write[db out] end on entry d t do # where d is a table representing a new row to be added to t, an existing table date_fmt_string:"date +%Y-%m-%d | tr -d '\n'" wget_fmt_string:"wget -qO- " , d.url , " | grep -o \"[^<]*\" | sed -e 's/<[^>]*>//g' | tr -d '\n'" wget_res:shell[wget_fmt_string].exit # if wget_res.exit = -1 do # exit["\n wget failed\n"] # else # t:insert title:(wget_res.out) reference:(d.url) created_date:(shell[date_fmt_string].out) into t # end t:insert title:(shell[wget_fmt_string].out) reference:(d.url) created_date:(shell[date_fmt_string].out) into t end if args[2] do # read in a csv, and store in memory as a table catalogue:load_db[csv_db] if args[2] ~ "-l" do show[catalogue] # listing everything is probs not the most ideal if you've got thousands of entries... else url:args[2] # add a row to the table stored in memory row.url:url row:table row catalogue:entry[row catalogue] # write the updated data to csv save_db[catalogue csv_db] end else print["\n please pass a url as an argument\n"] end