import getopt
import glob
import os
import sys
import stanza
from stanza.resources.common import load_resources_json
from stanza.utils.conll import CoNLL


def parse(input_file, output_file):
	"""Annotate *input_file* with the module-global `nlp` pipeline and write CoNLL-U.

	Parameters
	----------
	input_file : str
		Path to a plain-text file to annotate.
	output_file : str
		Path the CoNLL-U serialization is written to.
	"""
	# Read explicitly as UTF-8: the default open() encoding is
	# platform-dependent and would mis-decode non-ASCII text on some systems.
	with open(input_file, encoding="utf-8") as f:
		doc = nlp(f.read())
		CoNLL.write_doc2conll(doc, output_file)


# --- Command-line configuration -------------------------------------------
language = "pl"                 # ISO language code of the stanza model to use
use_pretokenized_text = False   # -p: input is already tokenized (one sentence per line)
options = "hpl:"
long_options = ["help", "pretokenized", "language="]
processors = "tokenize, pos, lemma, depparse"

USAGE = "usage: %s [-h|--help] [-p|--pretokenized] [-l LANG|--language=LANG] FILE..." % sys.argv[0]

try:
    opts, args = getopt.getopt(sys.argv[1:], options, long_options)
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            # The original branch was `pass`, so --help silently did nothing
            # and the script went on to run the full pipeline.
            print(USAGE)
            sys.exit(0)
        elif opt in ("-p", "--pretokenized"):
            use_pretokenized_text = True
        elif opt in ("-l", "--language"):
            language = arg
except getopt.error as err:
    # sys.exit instead of the interactive-only `exit` builtin.
    print(f"Error: {str(err)}")
    sys.exit(1)

# Expand shell-style glob patterns into the concrete list of input files.
# Initialized unconditionally: previously `input_files` was only bound when
# positional arguments were given, so running the script with no FILE
# arguments crashed with a NameError at the processing loop below.
input_files = []
for pattern in args:
    input_files.extend(glob.glob(pattern))

# Download the model for the requested language (a no-op once cached) and
# inspect the resources index to see which processors are available.
stanza.download(language)
resources = load_resources_json()
# Not every language ships an NER model; request it only when listed.
# .get() guards against a KeyError when the language code is absent from
# the resources index entirely (e.g. a typo on the command line).
if 'ner' in resources.get(language, {}):
    processors += ", ner"

nlp = stanza.Pipeline(lang=language, processors=processors, tokenize_pretokenized=use_pretokenized_text)

# Annotate each existing input file; the output is written alongside the
# input with the extension replaced by .conllu.
for f_in in input_files:
    if os.path.isfile(f_in):
        dir_name, file_name = os.path.split(f_in)
        file_name = os.path.splitext(file_name)[0] + '.conllu'
        f_out = os.path.join(dir_name, file_name)
        parse(f_in, f_out)