Commit 6720eb04 authored by Paul Würtz

adaptu por preni kaj procesi frazojn de tatoeba (adapt to fetch and process sentences from Tatoeba)

parent 27739bb9
@@ -9,3 +9,4 @@ frazojn.json
 vortoj_frazoj.json
 package-lock.json
 static/package-lock.json
+tmp/*
.PHONY: dualingvo
default: dualingvo ;

dualingvo: malpuraj_html_frazoj.json

tmp/sentences.tar.bz2:
	wget http://downloads.tatoeba.org/exports/sentences.tar.bz2 -O tmp/sentences.tar.bz2

tmp/links.tar.bz2:
	wget http://downloads.tatoeba.org/exports/links.tar.bz2 -O tmp/links.tar.bz2

tmp/tags.tar.bz2:
	wget http://downloads.tatoeba.org/exports/tags.tar.bz2 -O tmp/tags.tar.bz2

tmp/sentences.csv: tmp/sentences.tar.bz2
	tar jxf tmp/sentences.tar.bz2 -C tmp
	touch tmp/sentences.csv

tmp/links.csv: tmp/links.tar.bz2
	tar jxf tmp/links.tar.bz2 -C tmp
	touch tmp/links.csv

tmp/eo_links.json: tmp/links.csv tmp/eo_links.csv
	python helpiloj/links_to_json.py

# $$1 is escaped so that awk, not make, expands the field reference.
tmp/eo_sentences.csv: tmp/sentences.csv
	cat tmp/sentences.csv | grep -P "\tepo\t" | awk '{print $$1;}' > tmp/eo_sentences.csv

tmp/eo_links.csv: tmp/links.csv tmp/eo_sentences.csv
	cut -f 1 tmp/eo_sentences.csv > tmp/eo_links.csv

tmp/tags.csv: tmp/tags.tar.bz2
	tar jxf tmp/tags.tar.bz2 -C tmp
	touch tmp/tags.csv

tmp/eo_frazoj.json: tmp/sentences.csv tmp/eo_links.json tmp/tags.csv
	python helpiloj/sentences_to_json.py

tmp/eo_enfrge.json: tmp/eo_frazoj.json
	python helpiloj/extract_most_contributed_languages.py

dualingvo: tmp/eo_enfrge.json
	npm install https://github.com/martinrue/eopl/tarball/master
	pip install -r requirements.txt --no-index
	cd static/ && npm install && cd ..
	mkdir -p leckcioj
	python vortlist_sxerpilo.py
	node eopl_transskribilo.js > frazojn.json
	python frazoj_tts_eopl.py
	python dua_lingo_app.py freeze

# NOTE: this recipe runs python on a .json filename; the intended helper
# script presumably carries a .py extension.
malpuraj_html_frazoj.json:
	bash -c "python duolingo_webapp_esperanto_words.json"
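
How the build is meant to be driven, as a sketch (it assumes the tmp/ directory exists, since the wget recipes write into it and tmp/* is now gitignored):

    mkdir -p tmp
    make dualingvo

make then works through the intermediate targets (tmp/sentences.csv, tmp/eo_links.json, tmp/eo_frazoj.json, tmp/eo_enfrge.json) in dependency order before running the final dualingvo recipe.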
import json
import collections

# Paths assume the script is run from the repository root, per the Makefile
# rule "tmp/eo_enfrge.json: tmp/eo_frazoj.json".
fr = json.loads(open("tmp/eo_frazoj.json").read())

fr_lingvoj = []   # language codes seen on the kept sentences
enfrgel = {}      # sentences that have English, French and German translations
notrans = 0       # sentences without any translation
for k, v in fr.items():
    langs = v.keys()
    if len(langs) == 0:
        notrans += 1
    lp = 0
    lp += 1 if "deu" in langs else 0
    lp += 1 if "fra" in langs else 0
    lp += 1 if "eng" in langs else 0
    if lp >= 3:
        fr_lingvoj += list(langs)
        enfrgel[k] = v

print(collections.Counter(fr_lingvoj))
print("notrans", notrans)
print("enfrge", len(enfrgel))
open("tmp/eo_enfrge.json", "w").write(json.dumps(enfrgel, ensure_ascii=False, indent=4))

# Rough vocabulary statistics over the kept Esperanto sentences.
words = [w.lower().replace(",", "").replace("!", "").replace("?", "").replace(".", "")
         for f in enfrgel.keys() for w in f.split()]
print(collections.Counter(words).most_common())
c = collections.Counter(words)
print("len(set(words))", len(set(words)))
import json

# links.csv holds one "sentence_id<TAB>translation_id" pair per line;
# eo_links.csv holds the ids of the Esperanto sentences, one per line.
links = [[int(e) for e in l.split("\t") if e] for l in open("tmp/links.csv").read().split("\n")]
eo_links = [int(l) for l in open("tmp/eo_links.csv").read().split("\n") if l]

# Membership table: is_eo_link[i] is True when i is an Esperanto sentence id.
max_eo = max(eo_links)
is_eo_link = [False] * (max_eo + 1)
for l in eo_links:
    is_eo_link[l] = True

# For every Esperanto sentence id, collect the ids of its linked translations.
linkedListed = {l[0]: [] for l in links if len(l) == 2 and max_eo >= l[0] and is_eo_link[l[0]]}
l1, l2 = len(linkedListed), len(is_eo_link)
for l in links:
    if len(l) != 2 or max_eo < l[0] or not is_eo_link[l[0]]:
        continue
    linkedListed[l[0]] += [int(l[1])]

linkedListed_no_empties = {k: v for k, v in linkedListed.items() if v}
open("tmp/eo_links.json", "w").write(json.dumps(linkedListed_no_empties, ensure_ascii=False, indent=4))
import json

# Paths assume the script is run from the repository root, per the Makefile
# rule "tmp/eo_frazoj.json: tmp/sentences.csv tmp/eo_links.json tmp/tags.csv".
# sentences.csv holds one "id<TAB>lang<TAB>text" line per sentence.
fr = [t.split("\t") for t in open("tmp/sentences.csv").read().split("\n")]
frb = {int(f[0]): f[1:] for f in fr if f[0]}            # id -> [lang, text]
links = json.loads(open("tmp/eo_links.json").read())    # eo id -> translation ids
tags = [t.split("\t") for t in open("tmp/tags.csv").read().split("\n")]
tags = {int(f[0]): f[1:] for f in tags if f[0]}         # sentence id -> tag fields

frr = {}
failed = 0
for k, v in links.items():
    try:
        k = int(k)
        if frb[k][0] == "epo":
            # Map each linked translation's language code to its text.
            frr[frb[k][1]] = {frb[int(vv)][0]: frb[int(vv)][1] for vv in v}
            frr[frb[k][1]]["tags"] = tags[k] if k in tags else []
            frr[frb[k][1]]["identigilo"] = k
    except Exception as e:
        failed += 1

print("Failed {}/{}".format(failed, len(links.keys())))
open("tmp/eo_frazoj.json", "w").write(json.dumps(frr, ensure_ascii=False, indent=4))