Integrate changes and improve code style
- Integrate voxygen/i18n-check into voxygen/src/i18n.rs
- Make `cargo clippy` happy
This commit is contained in:
parent 57cddb3ee3
commit f42e8aa147
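Most of the style changes below follow clippy's `expect_fun_call` lint: `expect(&format!(...))` builds the panic message even on the success path, so the lint suggests `unwrap_or_else(|_| panic!(...))`, which only formats the message when the call actually fails. A minimal, self-contained sketch of the pattern (the helper and file name are hypothetical, not the project code):

```rust
use std::fs;
use std::path::Path;

// Hypothetical helper, used only to illustrate the `clippy::expect_fun_call` fix.
// Before: fs::read_to_string(path).expect(&format!("Failed to read {:?}", path))
// After: the closure defers the message formatting to the error path.
fn read_manifest(path: &Path) -> String {
    fs::read_to_string(path).unwrap_or_else(|_| panic!("Failed to read {:?}", path))
}

fn main() {
    let path = Path::new("manifest.ron"); // placeholder file name
    fs::write(path, "(string_map: {})").expect("failed to write sample file");
    println!("read {} bytes", read_manifest(path).len());
}
```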
@@ -178,11 +178,13 @@ fn read_file_from_path<'a>(
.peel_to_tree()
.expect("Impossible to peel HEAD to a tree object");
tree.get_path(path)
.expect(&format!(
"Impossible to find the file {:?} in reference {:?}",
path,
reference.name()
))
.unwrap_or_else(|_| {
panic!(
"Impossible to find the file {:?} in reference {:?}",
path,
reference.name()
)
})
.to_object(&repo)
.unwrap()
.peel_to_blob()
@@ -256,7 +258,7 @@ fn generate_key_version<'a>(
error_check_set.push(key.clone());
}
continue;
}
},
};

if line + 1 >= e.final_start_line()
@@ -273,7 +275,7 @@ fn generate_key_version<'a>(
Ok(false) => Some(existing_commit),
Err(err) => panic!("{}", err),
}
}
},
None => Some(e.final_commit_id()),
};
}
@@ -291,7 +293,7 @@ fn complete_key_versions<'a>(
asset_path: &Path,
) {
//TODO: review unwraps in this file

// For each file (if it's not a directory) in directory
for i18n_file in root_dir.join(&asset_path).read_dir().unwrap().flatten() {
if let Ok(file_type) = i18n_file.file_type() {
@@ -310,7 +312,7 @@ fn complete_key_versions<'a>(
e
);
continue;
}
},
};
i18n_key_versions.extend(generate_key_version(&repo, &i18n, &path, &i18n_blob));
}
@@ -339,7 +341,7 @@ fn verify_localization_directory(root_dir: &Path, directory_path: &Path) {
full_path.to_string_lossy(),
e
);
}
},
};
}
}
@@ -385,7 +387,7 @@ pub fn verify_all_localizations(root_dir: &Path, asset_path: &Path) {
i18n_directory.to_string_lossy(),
e
);
}
},
};
// Walk through each files and try to load them
verify_localization_directory(root_dir, &i18n_directory);
@@ -397,9 +399,8 @@ pub fn verify_all_localizations(root_dir: &Path, asset_path: &Path) {
}
}

/// `asset_path` - path to localization directory. Relative from root of the repo.
/// `root_dir` - absolute path to repo
/// `asset_path` - path to localization directory. Relative from root of the
/// repo. `root_dir` - absolute path to repo
/// `ref_i18n_path` - path to reference manifest
/// `i18n_references` - keys from reference language
/// `repo` - git object for main repo
@@ -407,13 +408,13 @@ pub fn verify_all_localizations(root_dir: &Path, asset_path: &Path) {
fn test_localization_directory(
asset_path: &Path,
root_dir: &Path,
ref_i18n_path: &PathBuf,
ref_i18n_path: &Path,
i18n_references: &HashMap<String, LocalizationEntryState>,
repo: &git2::Repository,
head_ref: &git2::Reference,
) -> Option<FindLocalization> {
let relfile = asset_path.join(&(LANG_MANIFEST_FILE.to_string() + ".ron"));
if relfile == ref_i18n_path.clone() {
if relfile == ref_i18n_path {
return None;
}
println!("\n-----------------------------------");
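The signature change in the hunk above (`ref_i18n_path: &PathBuf` to `&Path`) is the usual fix for clippy's `ptr_arg` lint: taking `&Path` accepts both borrowed paths and `&PathBuf` through deref coercion, and it also removes the need for the `.clone()` in the comparison. A small sketch under those assumptions (function name and paths are placeholders):

```rust
use std::path::{Path, PathBuf};

// Accepting `&Path` instead of `&PathBuf` (clippy::ptr_arg): callers can pass
// either a `&Path` or a `&PathBuf`, and no `.clone()` is needed to compare.
fn is_reference_manifest(relfile: &Path, ref_i18n_path: &Path) -> bool {
    relfile == ref_i18n_path
}

fn main() {
    let ref_manifest: PathBuf = PathBuf::from("i18n/en/manifest.ron"); // placeholder path
    let candidate = Path::new("i18n/en/manifest.ron");
    assert!(is_reference_manifest(candidate, &ref_manifest)); // &PathBuf coerces to &Path
    println!("comparison works without cloning");
}
```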
@@ -431,7 +432,7 @@ fn test_localization_directory(
e
);
return None;
}
},
};
let mut current_i18n = generate_key_version(
&repo,
@@ -459,7 +460,7 @@ fn test_localization_directory(
relfile.to_string_lossy()
);
continue;
}
},
};
let ref_commit_id = match ref_state.commit_id {
Some(c) => c,
@@ -469,7 +470,7 @@ fn test_localization_directory(
ref_key
);
continue;
}
},
};
if commit_id != ref_commit_id
&& !repo
@@ -480,18 +481,15 @@ fn test_localization_directory(
} else {
state.state = LocalizationState::UpToDate;
}
}
},
None => {
current_i18n.insert(
ref_key.to_owned(),
LocalizationEntryState {
key_line: None,
chuck_line_range: None,
commit_id: None,
state: LocalizationState::NotFound,
},
);
}
current_i18n.insert(ref_key.to_owned(), LocalizationEntryState {
key_line: None,
chuck_line_range: None,
commit_id: None,
state: LocalizationState::NotFound,
});
},
}
}

@@ -546,7 +544,7 @@ fn test_localization_directory(
);

for (state, mut lines) in state_map {
if lines.len() == 0 {
if lines.is_empty() {
continue;
}
println!("\n\t[{:?}]", state);
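The `lines.len() == 0` to `lines.is_empty()` change above follows clippy's `len_zero` lint; `is_empty()` states the intent directly. A trivial, self-contained sketch (names are illustrative only):

```rust
// clippy::len_zero: prefer `is_empty()` over comparing `len()` with zero.
fn print_nonempty_groups(state_map: Vec<(&str, Vec<usize>)>) {
    for (state, lines) in state_map {
        if lines.is_empty() {
            continue;
        }
        println!("\n\t[{:?}]", state);
        println!("lines: {:?}", lines);
    }
}

fn main() {
    print_nonempty_groups(vec![("UpToDate", vec![1, 2]), ("NotFound", vec![])]);
}
```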
@@ -597,18 +595,16 @@ fn test_localization_directory(
Some(result)
}

// `asset_path` - relative path to asset directory (should be "assets/voxygen/i18n/")
// `root_dir` - absolute path to main repo
// `asset_path` - relative path to asset directory (should be
// "assets/voxygen/i18n/") `root_dir` - absolute path to main repo
pub fn test_specific_localization(code: String, root_dir: &Path, asset_path: &Path) {
// Relative paths from root of repo to assets
let ref_lang_dir = asset_path.join(REFERENCE_LANG);
let ref_manifest = ref_lang_dir.join(LANG_MANIFEST_FILE.to_string() + ".ron");

// Initialize Git objects
let repo = git2::Repository::discover(&root_dir).expect(&format!(
"Failed to open the Git repository at {:?}",
&root_dir
));
let repo = git2::Repository::discover(&root_dir)
.unwrap_or_else(|_| panic!("Failed to open the Git repository at {:?}", &root_dir));
let head_ref = repo.head().expect("Impossible to get the HEAD reference");

// Read HEAD for the reference language manifest
@@ -632,7 +628,13 @@ pub fn test_specific_localization(code: String, root_dir: &Path, asset_path: &Pa
);
for sub_directory in loc.sub_directories.iter() {
let subdir_path = &ref_lang_dir.join(sub_directory);
complete_key_versions(&repo, &head_ref, &mut i18n_references, root_dir, &subdir_path);
complete_key_versions(
&repo,
&head_ref,
&mut i18n_references,
root_dir,
&subdir_path,
);
}

// Testing how specific language is localized
@@ -647,7 +649,6 @@ pub fn test_specific_localization(code: String, root_dir: &Path, asset_path: &Pa
);
}

pub fn test_all_localizations(root_dir: &Path, asset_path: &Path) {
let ref_i18n_dir_path = asset_path.join(REFERENCE_LANG);
let ref_i18n_path = ref_i18n_dir_path.join(LANG_MANIFEST_FILE.to_string() + ".ron");
@@ -663,10 +664,8 @@ pub fn test_all_localizations(root_dir: &Path, asset_path: &Path) {
}

// Initialize Git objects
let repo = git2::Repository::discover(&root_dir).expect(&format!(
"Failed to open the Git repository at {:?}",
&root_dir
));
let repo = git2::Repository::discover(&root_dir)
.unwrap_or_else(|_| panic!("Failed to open the Git repository at {:?}", &root_dir));
let head_ref = repo.head().expect("Impossible to get the HEAD reference");

// Read HEAD for the reference language file
@@ -681,11 +680,23 @@ pub fn test_all_localizations(root_dir: &Path, asset_path: &Path) {
);

// Gathering info about keys from reference language
complete_key_versions(&repo, &head_ref, &mut i18n_references, root_dir, &ref_i18n_dir_path);
complete_key_versions(
&repo,
&head_ref,
&mut i18n_references,
root_dir,
&ref_i18n_dir_path,
);
// read HEAD for the subfolders
for sub_directory in loc.sub_directories.iter() {
let subdir_path = &ref_i18n_dir_path.join(sub_directory);
complete_key_versions(&repo, &head_ref, &mut i18n_references, root_dir, &subdir_path);
complete_key_versions(
&repo,
&head_ref,
&mut i18n_references,
root_dir,
&subdir_path,
);
}

// Compare to other reference files

@@ -15,7 +15,7 @@ fn main() {
"--lang" => {
let code = cli[i + 1].clone();
analysis::test_specific_localization(code, root, asset_path);
}
},
_ => continue,
}
}

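The hunk above only adds a trailing comma to the `"--lang"` match arm, but it shows the CLI convention: `--lang` takes the language code from the next argument. A small, self-contained sketch of that pattern (printing instead of calling the checker, not the project's actual binary):

```rust
use std::env;

// Illustrative only: take the value that follows `--lang` on the command line.
fn main() {
    let cli: Vec<String> = env::args().collect();
    for (i, arg) in cli.iter().enumerate() {
        match arg.as_str() {
            "--lang" => {
                // Like the original, this panics if `--lang` is the last argument.
                let code = cli[i + 1].clone();
                println!("checking translation status for language {}", code);
            },
            _ => continue,
        }
    }
}
```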
@@ -391,9 +391,7 @@ pub fn i18n_asset_key(language_id: &str) -> String { ["voxygen.i18n.", language_
#[cfg(test)]
mod tests {
use super::{LocalizationFragment, RawLocalization, LANG_MANIFEST_FILE, REFERENCE_LANG};
use git2::Repository;
use hashbrown::{HashMap, HashSet};
use ron::de::{from_bytes, from_reader};
use ron::de::from_reader;
use std::{
fs,
path::{Path, PathBuf},
@@ -408,184 +406,6 @@ mod tests {
.collect()
}

#[derive(Debug, PartialEq)]
enum LocalizationState {
UpToDate,
NotFound,
Outdated,
Unknown,
Unused,
}

#[derive(Debug)]
struct LocalizationEntryState {
pub key_line: Option<usize>,
pub chuck_line_range: Option<(usize, usize)>,
pub commit_id: Option<git2::Oid>,
pub state: LocalizationState,
}

impl LocalizationEntryState {
pub fn new() -> LocalizationEntryState {
LocalizationEntryState {
key_line: None,
chuck_line_range: None,
commit_id: None,
state: LocalizationState::Unknown,
}
}
}

/// Returns the Git blob associated with the given reference and path
#[allow(clippy::expect_fun_call)] // TODO: Pending review in #587
fn read_file_from_path<'a>(
repo: &'a git2::Repository,
reference: &git2::Reference,
path: &std::path::Path,
) -> git2::Blob<'a> {
let tree = reference
.peel_to_tree()
.expect("Impossible to peel HEAD to a tree object");
tree.get_path(path)
.expect(&format!(
"Impossible to find the file {:?} in reference {:?}",
path,
reference.name()
))
.to_object(&repo)
.unwrap()
.peel_to_blob()
.expect("Impossible to fetch the Git object")
}

fn correspond(line: &str, key: &str) -> bool {
let pat = {
// Get left part of split
let mut begin = line
.split(':')
.next()
.expect("split always produces value")
.trim()
.chars();
// Remove quotes
begin.next();
begin.next_back();
begin.as_str()
};

pat == key
}

fn generate_key_version<'a>(
repo: &'a git2::Repository,
localization: &LocalizationFragment,
path: &std::path::Path,
file_blob: &git2::Blob,
) -> HashMap<String, LocalizationEntryState> {
let mut keys: HashMap<String, LocalizationEntryState> = localization
.string_map
.keys()
.map(|k| (k.to_owned(), LocalizationEntryState::new()))
.collect();
let mut to_process: HashSet<&String> = localization.string_map.keys().collect();
// Find key start lines
let file_content = std::str::from_utf8(file_blob.content()).expect("Got non UTF-8 file");

for (line_nb, line) in file_content.lines().enumerate() {
let mut found_key = None;

for key in to_process.iter() {
if correspond(line, key) {
found_key = Some(key.to_owned());
}
}

if let Some(key) = found_key {
keys.get_mut(key).unwrap().key_line = Some(line_nb);
to_process.remove(key);
};
}

let mut error_check_set: Vec<String> = vec![];
// Find commit for each keys
repo.blame_file(path, None)
.expect("Impossible to generate the Git blame")
.iter()
.for_each(|e: git2::BlameHunk| {
for (key, state) in keys.iter_mut() {
let line = match state.key_line {
Some(l) => l,
None => {
if !error_check_set.contains(key) {
eprintln!(
"Key {} does not have a git line in it's state! Skipping key.",
key
);
error_check_set.push(key.clone());
}
continue;
},
};

if line + 1 >= e.final_start_line()
&& line + 1 < e.final_start_line() + e.lines_in_hunk()
{
state.chuck_line_range = Some((
e.final_start_line(),
e.final_start_line() + e.lines_in_hunk(),
));
state.commit_id = match state.commit_id {
Some(existing_commit) => {
match repo.graph_descendant_of(e.final_commit_id(), existing_commit)
{
Ok(true) => Some(e.final_commit_id()),
Ok(false) => Some(existing_commit),
Err(err) => panic!("{}", err),
}
},
None => Some(e.final_commit_id()),
};
}
}
});

keys
}

fn complete_key_versions<'a>(
repo: &'a git2::Repository,
head_ref: &git2::Reference,
i18n_key_versions: &mut HashMap<String, LocalizationEntryState>,
dir: &Path,
) {
let root_dir = std::env::current_dir()
.map(|p| p.parent().expect("").to_owned())
.unwrap();
//TODO: review unwraps in this file
for i18n_file in root_dir.join(&dir).read_dir().unwrap().flatten() {
if let Ok(file_type) = i18n_file.file_type() {
if file_type.is_file() {
let full_path = i18n_file.path();
let path = full_path.strip_prefix(&root_dir).unwrap();
println!("-> {:?}", i18n_file.file_name());
let i18n_blob = read_file_from_path(&repo, &head_ref, &path);
let i18n: LocalizationFragment = match from_bytes(i18n_blob.content()) {
Ok(v) => v,
Err(e) => {
eprintln!(
"Could not parse {} RON file, skipping: {}",
i18n_file.path().to_string_lossy(),
e
);
continue;
},
};
i18n_key_versions.extend(generate_key_version(&repo, &i18n, &path, &i18n_blob));
}
}
}
}

fn verify_localization_directory(directory_path: &Path) {
let root_dir = std::env::current_dir()
.map(|p| p.parent().expect("").to_owned())
@@ -679,262 +499,11 @@ mod tests {
#[ignore]
#[allow(clippy::expect_fun_call)]
fn test_all_localizations() {
use i18n_check::analysis;
// Generate paths
let i18n_asset_path = Path::new("assets/voxygen/i18n/");
let ref_i18n_dir_path = i18n_asset_path.join(REFERENCE_LANG);
let ref_i18n_path = ref_i18n_dir_path.join(LANG_MANIFEST_FILE.to_string() + ".ron");
let root_dir = std::env::current_dir()
.map(|p| p.parent().expect("").to_owned())
.unwrap();
let i18n_path = root_dir.join(i18n_asset_path);

if !root_dir.join(&ref_i18n_dir_path).is_dir() {
panic!(
"Reference language folder not found {:?}",
&ref_i18n_dir_path
)
}
if !root_dir.join(&ref_i18n_path).is_file() {
panic!("Reference language file not found {:?}", &ref_i18n_path)
}

// Initialize Git objects
let repo = Repository::discover(&root_dir).expect(&format!(
"Failed to open the Git repository at {:?}",
&root_dir
));
let head_ref = repo.head().expect("Impossible to get the HEAD reference");

// Read HEAD for the reference language file
let i18n_ref_blob = read_file_from_path(&repo, &head_ref, &ref_i18n_path);
let loc: RawLocalization = from_bytes(i18n_ref_blob.content())
.expect("Expect to parse reference i18n RON file, can't proceed without it");
let mut i18n_references: HashMap<String, LocalizationEntryState> = generate_key_version(
&repo,
&LocalizationFragment::from(loc.clone()),
&ref_i18n_path,
&i18n_ref_blob,
);

// read HEAD for the fragment files
complete_key_versions(&repo, &head_ref, &mut i18n_references, &ref_i18n_dir_path);
// read HEAD for the subfolders
for sub_directory in loc.sub_directories.iter() {
let subdir_path = &ref_i18n_dir_path.join(sub_directory);
complete_key_versions(&repo, &head_ref, &mut i18n_references, &subdir_path);
}

// Compare to other reference files
let i18n_directories = i18n_directories(&i18n_path);
let mut i18n_entry_counts: HashMap<PathBuf, (usize, usize, usize, usize)> = HashMap::new();
for file in &i18n_directories {
let reldir = file.strip_prefix(&root_dir).unwrap();
let relfile = reldir.join(&(LANG_MANIFEST_FILE.to_string() + ".ron"));
if relfile == ref_i18n_path {
continue;
}
println!("\n-----------------------------------");
println!("{:?}", relfile);
println!("-----------------------------------");

// Find the localization entry state
let current_blob = read_file_from_path(&repo, &head_ref, &relfile);
let current_loc: RawLocalization = match from_bytes(current_blob.content()) {
Ok(v) => v,
Err(e) => {
eprintln!(
"Could not parse {} RON file, skipping: {}",
relfile.to_string_lossy(),
e
);
continue;
},
};
let mut current_i18n = generate_key_version(
&repo,
&LocalizationFragment::from(current_loc.clone()),
&relfile,
&current_blob,
);
// read HEAD for the fragment files
complete_key_versions(&repo, &head_ref, &mut current_i18n, &reldir);
// read HEAD for the subfolders
for sub_directory in current_loc.sub_directories.iter() {
let subdir_path = &reldir.join(sub_directory);
complete_key_versions(&repo, &head_ref, &mut current_i18n, &subdir_path);
}

for (ref_key, ref_state) in i18n_references.iter() {
match current_i18n.get_mut(ref_key) {
Some(state) => {
let commit_id = match state.commit_id {
Some(c) => c,
None => {
eprintln!(
"Commit ID of key {} in i18n file {} is missing! Skipping key.",
ref_key,
relfile.to_string_lossy()
);
continue;
},
};
let ref_commit_id = match ref_state.commit_id {
Some(c) => c,
None => {
eprintln!(
"Commit ID of key {} in reference i18n file is missing! \
Skipping key.",
ref_key
);
continue;
},
};
if commit_id != ref_commit_id
&& !repo
.graph_descendant_of(commit_id, ref_commit_id)
.unwrap_or(false)
{
state.state = LocalizationState::Outdated;
} else {
state.state = LocalizationState::UpToDate;
}
},
None => {
current_i18n.insert(ref_key.to_owned(), LocalizationEntryState {
key_line: None,
chuck_line_range: None,
commit_id: None,
state: LocalizationState::NotFound,
});
},
}
}

let ref_keys: HashSet<&String> = i18n_references.keys().collect();
for (_, state) in current_i18n
.iter_mut()
.filter(|&(k, _)| !ref_keys.contains(k))
{
state.state = LocalizationState::Unused;
}

// Display
println!(
"\n{:10} | {:60}| {:40} | {:40}\n",
"State",
"Key name",
relfile.to_str().unwrap(),
ref_i18n_path.to_str().unwrap()
);

let mut sorted_keys: Vec<&String> = current_i18n.keys().collect();
sorted_keys.sort();

let current_i18n_entry_count = current_i18n.len();
let mut uptodate_entries = 0;
let mut outdated_entries = 0;
let mut unused_entries = 0;
let mut notfound_entries = 0;
let mut unknown_entries = 0;

for key in sorted_keys {
let state = current_i18n.get(key).unwrap();
if state.state != LocalizationState::UpToDate {
match state.state {
LocalizationState::Outdated => outdated_entries += 1,
LocalizationState::NotFound => notfound_entries += 1,
LocalizationState::Unknown => unknown_entries += 1,
LocalizationState::Unused => unused_entries += 1,
LocalizationState::UpToDate => unreachable!(),
};

println!(
"[{:9}] | {:60}| {:40} | {:40}",
format!("{:?}", state.state),
key,
state
.commit_id
.map(|s| format!("{}", s))
.unwrap_or_else(|| "None".to_string()),
i18n_references
.get(key)
.map(|s| s.commit_id)
.flatten()
.map(|s| format!("{}", s))
.unwrap_or_else(|| "None".to_string()),
);
} else {
uptodate_entries += 1;
}
}

println!(
"\n{} up-to-date, {} outdated, {} unused, {} not found, {} unknown entries",
uptodate_entries,
outdated_entries,
unused_entries,
notfound_entries,
unknown_entries
);

// Calculate key count that actually matter for the status of the translation
// Unused entries don't break the game
let real_entry_count = current_i18n_entry_count - unused_entries;
let uptodate_percent = (uptodate_entries as f32 / real_entry_count as f32) * 100_f32;
let outdated_percent = (outdated_entries as f32 / real_entry_count as f32) * 100_f32;
let untranslated_percent =
((notfound_entries + unknown_entries) as f32 / real_entry_count as f32) * 100_f32;

println!(
"{:.2}% up-to-date, {:.2}% outdated, {:.2}% untranslated\n",
uptodate_percent, outdated_percent, untranslated_percent,
);

i18n_entry_counts.insert(
file.clone(),
(
uptodate_entries,
outdated_entries,
notfound_entries + unknown_entries,
real_entry_count,
),
);
}

let mut overall_uptodate_entry_count = 0;
let mut overall_outdated_entry_count = 0;
let mut overall_untranslated_entry_count = 0;
let mut overall_real_entry_count = 0;

println!("-----------------------------------------------------------------------------");
println!("Overall Translation Status");
println!("-----------------------------------------------------------------------------");
println!(
"{:12}| {:8} | {:8} | {:8}",
"", "up-to-date", "outdated", "untranslated"
);

for (path, (uptodate, outdated, untranslated, real)) in i18n_entry_counts {
overall_uptodate_entry_count += uptodate;
overall_outdated_entry_count += outdated;
overall_untranslated_entry_count += untranslated;
overall_real_entry_count += real;

println!(
"{:12}|{:8} |{:6} |{:8}",
path.file_name().unwrap().to_string_lossy(),
uptodate,
outdated,
untranslated
);
}

println!(
"\n{:.2}% up-to-date, {:.2}% outdated, {:.2}% untranslated",
(overall_uptodate_entry_count as f32 / overall_real_entry_count as f32) * 100_f32,
(overall_outdated_entry_count as f32 / overall_real_entry_count as f32) * 100_f32,
(overall_untranslated_entry_count as f32 / overall_real_entry_count as f32) * 100_f32,
);
println!("-----------------------------------------------------------------------------\n");
let curr_dir = std::env::current_dir().unwrap();
let root = curr_dir.parent().unwrap();
analysis::test_all_localizations(&root, &i18n_asset_path);
}
}
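After this change the bulk of the old in-tree checker is deleted from the test module and the remaining `#[ignore]`d test only resolves paths and delegates to the `i18n-check` crate. A sketch of the resulting shape, assembled from the context lines kept in the hunk above (the exact attributes retained may differ):

```rust
// Sketch only: the heavy lifting now lives in `i18n_check::analysis`.
// Run explicitly with `cargo test -- --ignored`.
#[test]
#[ignore]
fn test_all_localizations() {
    use i18n_check::analysis;
    use std::path::Path;

    let i18n_asset_path = Path::new("assets/voxygen/i18n/");
    let curr_dir = std::env::current_dir().unwrap();
    let root = curr_dir.parent().unwrap();
    analysis::test_all_localizations(&root, &i18n_asset_path);
}
```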