use std::collections::HashMap;
use pyo3::prelude::*;
use pyo3::types::IntoPyDict;
/// Expands multi-word tokens in `text` for language `lang` via the
/// Python-side `StanzaWrapper`.
///
/// On any Python error the error is printed with `Debug` formatting and
/// an empty vector is returned instead.
pub fn mwt_expand(text: &str, lang: &str) -> Vec<Vec<HashMap<String, String>>> {
    _mwt_expand(text, lang).unwrap_or_else(|err| {
        println!("{:?}", err);
        Vec::new()
    })
}
/// Runs `StanzaWrapper.mwt_expand` through an embedded Python helper
/// module and extracts the result into Rust collections.
fn _mwt_expand(text: &str, lang: &str) -> PyResult<Vec<Vec<HashMap<String, String>>>> {
    // Python shim compiled at call time; forwards both kwargs unchanged.
    let helper_src = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def mwt_expand(text,lang):\n",
        "\tsw=StanzaWrapper()\n",
        "\treturn sw.mwt_expand(text=text,lang=lang)\n"
    );
    let call_kwargs = vec![("text", text), ("lang", lang)];
    Python::with_gil(|py| {
        let module = PyModule::from_code(py, helper_src, "", "")?;
        module
            .getattr("mwt_expand")?
            .call((), Some(call_kwargs.into_py_dict(py)))?
            .extract()
    })
}
/// Downloads Stanza models for every language code in `list_lang`,
/// printing a status line for each one.
pub fn download_langs(list_lang: Vec<&str>) {
    for lang in list_lang {
        if download_lang(lang) {
            println!("Downloaded successfully!")
        } else {
            // Fixed message: was the ungrammatical "Downloaded failed!".
            println!("Download failed!")
        }
    }
}
/// Downloads the Stanza model for `lang`.
///
/// Returns `true` on success; on a Python error the error is printed
/// and `false` is returned.
pub fn download_lang(lang: &str) -> bool {
    _download_lang(lang).unwrap_or_else(|err| {
        println!("{:?}", err);
        false
    })
}
/// Invokes `StanzaWrapper.download` for `lang` via an embedded Python
/// helper and reports success.
///
/// The helper returns `True` unconditionally once the download call
/// completes; a failing download is expected to raise, which surfaces
/// here as `PyErr`. (Removed the dead `flag=False` local and the
/// confusing `return not False` from the embedded script — behavior is
/// unchanged.)
fn _download_lang(lang: &str) -> PyResult<bool> {
    let py_download = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def download_lang(lang):\n",
        "\tsw=StanzaWrapper()\n",
        "\tsw.download(lang=lang)\n",
        "\treturn True\n"
    );
    let kwargs = vec![("lang", lang)];
    Python::with_gil(|py| {
        let downloaded: bool = PyModule::from_code(py, py_download, "", "")?
            .getattr("download_lang")?
            .call((), Some(kwargs.into_py_dict(py)))?
            .extract()?;
        Ok(downloaded)
    })
}
/// Detects the language of each string in `list_str` via the
/// Python-side `StanzaWrapper`.
///
/// On a Python error the error is printed and an empty vector is
/// returned.
pub fn lang(list_str: Vec<&str>) -> Vec<HashMap<String, String>> {
    _lang(list_str).unwrap_or_else(|err| {
        println!("{:?}", err);
        Vec::new()
    })
}
/// Runs `StanzaWrapper.lang` through an embedded Python helper and
/// extracts the per-text results.
fn _lang(list_text: Vec<&str>) -> PyResult<Vec<HashMap<String, String>>> {
    let helper_src = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def lang(list_text):\n",
        "\tsw=StanzaWrapper()\n",
        "\treturn sw.lang(list_text=list_text)\n"
    );
    let call_kwargs = vec![("list_text", list_text)];
    Python::with_gil(|py| {
        let module = PyModule::from_code(py, helper_src, "", "")?;
        module
            .getattr("lang")?
            .call((), Some(call_kwargs.into_py_dict(py)))?
            .extract()
    })
}
/// Part-of-speech tags `text` in language `lang`.
///
/// Chinese ("zh") input is routed through the dedicated Chinese tagger.
/// The result is echoed to stdout; on a Python error the error is
/// printed and an empty vector is returned.
pub fn pos(text: &str, lang: &str) -> Vec<HashMap<String, String>> {
    // Idiomatic expression assignment instead of a deferred `let` with
    // two mutating branches.
    let tag_result = if lang == "zh" {
        _pos_chinese(text, lang)
    } else {
        _pos(text, lang)
    };
    match tag_result {
        Ok(t) => {
            println!("{:?}", t);
            t
        }
        Err(e) => {
            println!("{:?}", e);
            Vec::new()
        }
    }
}
/// Runs `StanzaWrapper.tag` through an embedded Python helper, echoes
/// the tags to stdout, and returns them.
fn _pos(text: &str, lang: &str) -> PyResult<Vec<HashMap<String, String>>> {
    let helper_src = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def tag(text,lang):\n",
        "\tsw=StanzaWrapper()\n",
        "\treturn sw.tag(text=text,lang=lang)\n"
    );
    let call_kwargs = vec![("text", text), ("lang", lang)];
    Python::with_gil(|py| {
        let module = PyModule::from_code(py, helper_src, "", "")?;
        let tags: Vec<HashMap<String, String>> = module
            .getattr("tag")?
            .call((), Some(call_kwargs.into_py_dict(py)))?
            .extract()?;
        println!("{:?}", tags);
        Ok(tags)
    })
}
/// Runs `StanzaWrapper.tag_chinese` through an embedded Python helper,
/// echoes the tags to stdout, and returns them.
fn _pos_chinese(text: &str, lang: &str) -> PyResult<Vec<HashMap<String, String>>> {
    let helper_src = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def tag_chinese(text,lang):\n",
        "\tsw=StanzaWrapper()\n",
        "\treturn sw.tag_chinese(text=text,lang=lang)\n"
    );
    let call_kwargs = vec![("text", text), ("lang", lang)];
    Python::with_gil(|py| {
        let module = PyModule::from_code(py, helper_src, "", "")?;
        let tags: Vec<HashMap<String, String>> = module
            .getattr("tag_chinese")?
            .call((), Some(call_kwargs.into_py_dict(py)))?
            .extract()?;
        println!("{:?}", tags);
        Ok(tags)
    })
}
/// Runs sentiment analysis on `text` in language `lang` via the
/// Python-side `StanzaWrapper`.
///
/// The result is echoed to stdout; on a Python error the error is
/// printed and an empty vector is returned.
pub fn sentiment(text: &str, lang: &str) -> Vec<HashMap<String, String>> {
    match _sentiment(text, lang) {
        Ok(scores) => {
            println!("{:?}", scores);
            scores
        }
        Err(err) => {
            println!("{:?}", err);
            Vec::new()
        }
    }
}
/// Runs `StanzaWrapper.sentiment` through an embedded Python helper,
/// echoes the scores to stdout, and returns them.
fn _sentiment(text: &str, lang: &str) -> PyResult<Vec<HashMap<String, String>>> {
    let helper_src = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def sentiment(text,lang):\n",
        "\tsw=StanzaWrapper()\n",
        "\treturn sw.sentiment(text=text,lang=lang)\n"
    );
    let call_kwargs = vec![("text", text), ("lang", lang)];
    Python::with_gil(|py| {
        let module = PyModule::from_code(py, helper_src, "", "")?;
        let sentiments: Vec<HashMap<String, String>> = module
            .getattr("sentiment")?
            .call((), Some(call_kwargs.into_py_dict(py)))?
            .extract()?;
        println!("{:?}", sentiments);
        Ok(sentiments)
    })
}
/// Tokenizes `text` in language `lang` via the Python-side
/// `StanzaWrapper`, one inner vector per sentence.
///
/// The result is echoed to stdout; on a Python error the error is
/// printed and an empty vector is returned.
pub fn tokenize(text: &str, lang: &str) -> Vec<Vec<HashMap<String, String>>> {
    match _tokenize(text, lang) {
        Ok(tokens) => {
            println!("{:?}", tokens);
            tokens
        }
        Err(err) => {
            println!("{:?}", err);
            Vec::new()
        }
    }
}
/// Runs `StanzaWrapper.tokenize` through an embedded Python helper,
/// echoes the token lists to stdout, and returns them.
fn _tokenize(text: &str, lang: &str) -> PyResult<Vec<Vec<HashMap<String, String>>>> {
    let helper_src = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def tokenize(text,lang):\n",
        "\tsw=StanzaWrapper()\n",
        "\treturn sw.tokenize(text=text,lang=lang)\n"
    );
    let call_kwargs = vec![("text", text), ("lang", lang)];
    Python::with_gil(|py| {
        let module = PyModule::from_code(py, helper_src, "", "")?;
        let token_lists: Vec<Vec<HashMap<String, String>>> = module
            .getattr("tokenize")?
            .call((), Some(call_kwargs.into_py_dict(py)))?
            .extract()?;
        println!("{:?}", token_lists);
        Ok(token_lists)
    })
}
/// Splits `text` in language `lang` into sentences via the Python-side
/// `StanzaWrapper`.
///
/// The result is echoed to stdout; on a Python error the error is
/// printed and an empty vector is returned.
pub fn tokenize_sentence(text: &str, lang: &str) -> Vec<String> {
    match _tokenize_sentence(text, lang) {
        Ok(sentences) => {
            println!("{:?}", sentences);
            sentences
        }
        Err(err) => {
            println!("{:?}", err);
            Vec::new()
        }
    }
}
/// Runs `StanzaWrapper.tokenize_sentence` through an embedded Python
/// helper, echoes the sentences to stdout, and returns them.
///
/// Bug fix: the embedded helper was named `tokenize` while the Rust
/// side looked up `tokenize_sentence`, so every call raised
/// AttributeError and the public wrapper always returned an empty Vec.
/// The helper is now defined under the name that is looked up.
fn _tokenize_sentence(text: &str, lang: &str) -> PyResult<Vec<String>> {
    let py_tokenize = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def tokenize_sentence(text,lang):\n",
        "\tsw=StanzaWrapper()\n",
        "\treturn sw.tokenize_sentence(text=text,lang=lang)\n"
    );
    let kwargs = vec![("text", text), ("lang", lang)];
    Python::with_gil(|py| {
        let list_result: Vec<String> = PyModule::from_code(py, py_tokenize, "", "")?
            .getattr("tokenize_sentence")?
            .call((), Some(kwargs.into_py_dict(py)))?
            .extract()?;
        println!("{:?}", list_result);
        Ok(list_result)
    })
}
/// Extracts named entities from `text` in language `lang` via the
/// Python-side `StanzaWrapper`.
///
/// On a Python error the error is printed and an empty vector is
/// returned.
pub fn ner(text: &str, lang: &str) -> Vec<HashMap<String, String>> {
    _ner(text, lang).unwrap_or_else(|err| {
        println!("{:?}", err);
        Vec::new()
    })
}
/// Runs `StanzaWrapper.ner` through an embedded Python helper
/// (exposed as `getNER`) and extracts the entity records.
fn _ner(text: &str, lang: &str) -> PyResult<Vec<HashMap<String, String>>> {
    let helper_src = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def getNER(text,lang):\n",
        "\tsw=StanzaWrapper()\n",
        "\treturn sw.ner(text=text,lang=lang)\n"
    );
    let call_kwargs = vec![("text", text), ("lang", lang)];
    Python::with_gil(|py| {
        let module = PyModule::from_code(py, helper_src, "", "")?;
        module
            .getattr("getNER")?
            .call((), Some(call_kwargs.into_py_dict(py)))?
            .extract()
    })
}
/// Builds dependency parses for `text` in language `lang`, one inner
/// vector per sentence.
///
/// Chinese ("zh") input is routed through the dedicated Chinese parser.
/// On a Python error the error is printed and an empty vector is
/// returned.
pub fn dependency_tree(text: &str, lang: &str) -> Vec<Vec<HashMap<String, String>>> {
    // Idiomatic expression assignment instead of a deferred `let` with
    // two mutating branches.
    let result = if lang == "zh" {
        _dependency_tree_chinese(text, lang)
    } else {
        _dependency_tree(text, lang)
    };
    match result {
        Ok(t) => t,
        Err(e) => {
            println!("{:?}", e);
            Vec::new()
        }
    }
}
/// Runs `StanzaWrapper.parse_dependency_chinese` through an embedded
/// Python helper, echoes the parses to stdout, and returns them.
fn _dependency_tree_chinese(text: &str, lang: &str) -> PyResult<Vec<Vec<HashMap<String, String>>>> {
    let helper_src = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def parse_dependency_chinese(text,lang):\n",
        "\tsw=StanzaWrapper()\n",
        "\treturn sw.parse_dependency_chinese(text=text,lang=lang)\n"
    );
    let call_kwargs = vec![("text", text), ("lang", lang)];
    Python::with_gil(|py| {
        let module = PyModule::from_code(py, helper_src, "", "")?;
        let parses: Vec<Vec<HashMap<String, String>>> = module
            .getattr("parse_dependency_chinese")?
            .call((), Some(call_kwargs.into_py_dict(py)))?
            .extract()?;
        println!("{:?}", parses);
        Ok(parses)
    })
}
/// Runs `StanzaWrapper.parse_dependency` through an embedded Python
/// helper, echoes the parses to stdout, and returns them.
fn _dependency_tree(text: &str, lang: &str) -> PyResult<Vec<Vec<HashMap<String, String>>>> {
    let helper_src = concat!(
        "from nerkit.StanzaApi import StanzaWrapper\n",
        "def parse_dependency(text,lang):\n",
        "\tsw=StanzaWrapper()\n",
        "\treturn sw.parse_dependency(text=text,lang=lang)\n"
    );
    let call_kwargs = vec![("text", text), ("lang", lang)];
    Python::with_gil(|py| {
        let module = PyModule::from_code(py, helper_src, "", "")?;
        let parses: Vec<Vec<HashMap<String, String>>> = module
            .getattr("parse_dependency")?
            .call((), Some(call_kwargs.into_py_dict(py)))?
            .extract()?;
        println!("{:?}", parses);
        Ok(parses)
    })
}