move translation check to separate ci step

note: TAGUUID is used to avoid SQL injection via a maliciously chosen branch name
Marcel Märtens 2021-08-02 11:05:05 +02:00
parent 8be625c606
commit ee704e171d
7 changed files with 57 additions and 29 deletions

View File

@@ -11,6 +11,25 @@ unittests:
   retry:
     max: 2
+translation:
+  extends: .release
+  stage: build
+  image: registry.gitlab.com/veloren/veloren-docker-ci/cache/quality:${CACHE_IMAGE_TAG}
+  script:
+    - ln -s /dockercache/target target
+    - cat ./.gitlab/scripts/translation.sh
+    - source ./.gitlab/scripts/translation.sh
+    - TAGUUID="Z$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 16 | head -n 1)" || echo "ignore this returncode, dont ask me why, it works"
+    - echo $TAGUUID # Use TAGUUID to mitigate https://xkcd.com/327/ in the branch name
+    - echo 'SET veloren.timestamp = "'"$(git show --no-patch --no-notes --pretty='%cd' HEAD)"'";' > upload.sql
+    - echo "SET veloren.branch = \$${TAGUUID}\$${CI_COMMIT_REF_NAME}\$${TAGUUID}\$;" >> upload.sql
+    - echo "SET veloren.sha = \$${TAGUUID}\$${CI_COMMIT_SHA}\$${TAGUUID}\$;" >> upload.sql
+    - echo '\copy translations ("country_code", "file_name", "translation_key", "status", "git_commit") from '"'translation_analysis.csv' csv header" >> upload.sql
+    - cat upload.sql
+    - PGPASSWORD="${CIDBPASSWORD}" PGSSLROOTCERT="./.gitlab/ci-db.crt" psql "sslmode=verify-ca host=grafana.veloren.net port=15432 dbname=translations" -U hgseehzjtsrghtjdcqw -f upload.sql;
+  retry:
+    max: 2
 benchmarks:
   extends: .release
   stage: build
@@ -23,7 +42,7 @@ benchmarks:
     - cat ./.gitlab/scripts/benchmark.sh
     - source ./.gitlab/scripts/benchmark.sh
     - TAGUUID="Z$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 16 | head -n 1)" || echo "ignore this returncode, dont ask me why, it works"
-    - echo $TAGUUID
+    - echo $TAGUUID # Use TAGUUID to mitigate https://xkcd.com/327/ in the branch name
     - echo 'SET veloren.timestamp = "'"$(git show --no-patch --no-notes --pretty='%cd' HEAD)"'";' > upload.sql
     - echo "SET veloren.branch = \$${TAGUUID}\$${CI_COMMIT_REF_NAME}\$${TAGUUID}\$;" >> upload.sql
     - echo "SET veloren.sha = \$${TAGUUID}\$${CI_COMMIT_SHA}\$${TAGUUID}\$;" >> upload.sql

.gitlab/scripts/translation.sh Executable file (+4)
View File

@@ -0,0 +1,4 @@
+#!/bin/bash
+export VELOREN_ASSETS="$(pwd)/assets"
+rm -r target/debug/incremental/veloren_* || echo "all good" # TMP FIX FOR 2021-03-22-nightly
+time cargo test --package veloren-voxygen-i18n --lib test_all_localizations -- --nocapture --ignored

View File

@@ -1,7 +1,6 @@
 #!/bin/bash
 export VELOREN_ASSETS="$(pwd)/assets"
 rm -r target/debug/incremental/veloren_* || echo "all good" # TMP FIX FOR 2021-03-22-nightly
-time cargo test --package veloren-voxygen-i18n --lib test_all_localizations -- --nocapture --ignored &&
 time cargo test --package veloren-common-assets asset_tweak::tests --features asset_tweak --lib &&
 ( rm -r target/debug/incremental* || echo "all good" ) && # TMP FIX FOR 2021-03-22-nightly
 time cargo test

View File

@@ -5,7 +5,7 @@ use crate::{
     path::{BasePath, LangPath},
     raw::{self, RawFragment, RawLanguage},
     stats::{
-        print_csv_file, print_overall_stats, print_translation_stats, LocalizationAnalysis,
+        print_csv_stats, print_overall_stats, print_translation_stats, LocalizationAnalysis,
         LocalizationStats,
     },
     REFERENCE_LANG,
@@ -167,12 +167,10 @@ fn gather_results(
                 Some(LocalizationState::Unused) => stats.unused_entries += 1,
                 Some(LocalizationState::UpToDate) => stats.uptodate_entries += 1,
             };
-            if entry.state != Some(LocalizationState::UpToDate) {
-                let state_keys = state_map.data.get_mut(&entry.state).expect("prefiled");
-                state_keys.push((file.clone(), key.to_owned(), entry.commit_id));
-            }
+            let state_keys = state_map.data.get_mut(&entry.state).expect("prefiled");
+            state_keys.push((file.clone(), key.to_owned(), entry.commit_id));
         }
     }

     for (_, entries) in state_map.data.iter_mut() {
         entries.sort();
@@ -209,10 +207,13 @@ pub fn test_specific_localizations(
         analysis.insert(language_identifier.to_owned(), (state_map, stats));
     }

+    let output = path.root_path().join("translation_analysis.csv");
+    let mut f = std::fs::File::create(output).expect("couldn't write csv file");
+
     //printing
     for (language_identifier, (state_map, stats)) in &analysis {
         if csv_enabled {
-            print_csv_file(state_map);
+            print_csv_stats(state_map, &mut f);
         } else {
             print_translation_stats(
                 language_identifier,

View File

@@ -355,6 +355,6 @@ mod tests {
     fn test_all_localizations() {
         // Generate paths
         let root_dir = common_assets::find_root().expect("Failed to discover repository root");
-        crate::analysis::test_all_localizations(&BasePath::new(&root_dir), true, false);
+        crate::analysis::test_all_localizations(&BasePath::new(&root_dir), true, true);
     }
 }

View File

@@ -109,6 +109,8 @@ fn recursive_fragments_paths_in_language(
     subfolder: &Path,
     result: &mut Vec<PathBuf>,
 ) -> Result<(), std::io::Error> {
+    let manifest_path = PathBuf::from(&format!("{}.{}", LANG_MANIFEST_FILE, LANG_EXTENSION));
+    let template_path = PathBuf::from(&format!("{}.{}", "template", LANG_EXTENSION));
     let search_dir = lpath.sub_path(subfolder);
     for fragment_file in search_dir.read_dir()?.flatten() {
         let file_type = fragment_file.file_type()?;
@@ -117,7 +119,8 @@ fn recursive_fragments_paths_in_language(
         if file_type.is_dir() {
             recursive_fragments_paths_in_language(lpath, relative_path, result)?;
         } else if file_type.is_file()
-            && relative_path != Path::new(&format!("{}.{}", LANG_MANIFEST_FILE, LANG_EXTENSION))
+            && relative_path != manifest_path
+            && relative_path != template_path
         {
             result.push(relative_path.to_path_buf());
         }
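Two things change in this fragment walker: template files are now excluded alongside the language manifest, and both excluded names are built once before the loop instead of re-running format! for every directory entry. A reduced sketch of the resulting filter (the constant values below are stand-ins for the crate's LANG_MANIFEST_FILE and LANG_EXTENSION):

```rust
use std::path::{Path, PathBuf};

// Stand-ins for the crate constants LANG_MANIFEST_FILE and LANG_EXTENSION.
const LANG_MANIFEST_FILE: &str = "_manifest";
const LANG_EXTENSION: &str = "ron";

/// A fragment is any file that is neither the manifest nor a template.
/// Comparing against prebuilt `PathBuf`s (as the commit does) avoids a
/// `format!` allocation for every visited directory entry.
fn is_fragment(relative_path: &Path, manifest: &Path, template: &Path) -> bool {
    relative_path != manifest && relative_path != template
}

fn main() {
    let manifest = PathBuf::from(format!("{}.{}", LANG_MANIFEST_FILE, LANG_EXTENSION));
    let template = PathBuf::from(format!("{}.{}", "template", LANG_EXTENSION));
    for name in ["hud.ron", "_manifest.ron", "template.ron"] {
        println!("{name}: {}", is_fragment(Path::new(name), &manifest, &template));
    }
}
```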

View File

@@ -31,22 +31,21 @@ impl LocalizationStats {
 impl LocalizationAnalysis {
     pub(crate) fn new(language_identifier: &str) -> Self {
         let mut data = HashMap::new();
-        data.insert(Some(LocalizationState::UpToDate), vec![]);
-        data.insert(Some(LocalizationState::NotFound), vec![]);
-        data.insert(Some(LocalizationState::Unused), vec![]);
-        data.insert(Some(LocalizationState::Outdated), vec![]);
-        data.insert(None, vec![]);
+        for key in ALL_LOCALIZATION_STATES.iter() {
+            data.insert(*key, vec![]);
+        }
         Self {
             language_identifier: language_identifier.to_owned(),
             data,
         }
     }

-    fn show(
+    fn show<W: std::io::Write>(
         &self,
         state: Option<LocalizationState>,
         ref_language: &RawLanguage<LocalizationEntryState>,
         be_verbose: bool,
+        output: &mut W,
     ) {
         let entries = self.data.get(&state).unwrap_or_else(|| {
             panic!(
@@ -57,7 +56,7 @@ impl LocalizationAnalysis {
         if entries.is_empty() {
             return;
         }
-        println!("\n\t[{}]", LocalizationState::print(&state));
+        writeln!(output, "\n\t[{}]", LocalizationState::print(&state)).unwrap();
         for (path, key, commit_id) in entries {
             if be_verbose {
                 let our_commit = LocalizationAnalysis::print_commit(commit_id);
@@ -68,28 +67,30 @@ impl LocalizationAnalysis {
                     .and_then(|s| s.commit_id)
                     .map(|s| format!("{}", s))
                     .unwrap_or_else(|| "None".to_owned());
-                println!("{:60}| {:40} | {:40}", key, our_commit, ref_commit,);
+                writeln!(output, "{:60}| {:40} | {:40}", key, our_commit, ref_commit).unwrap();
             } else {
-                println!("{}", key);
+                writeln!(output, "{}", key).unwrap();
             }
         }
     }

-    fn csv(&self, state: Option<LocalizationState>) {
+    fn csv<W: std::io::Write>(&self, state: Option<LocalizationState>, output: &mut W) {
         let entries = self
             .data
             .get(&state)
             .unwrap_or_else(|| panic!("called on invalid state: {:?}", state));
         for (path, key, commit_id) in entries {
             let our_commit = LocalizationAnalysis::print_commit(commit_id);
-            println!(
+            writeln!(
+                output,
                 "{},{:?},{},{},{}",
                 self.language_identifier,
                 path,
                 key,
                 LocalizationState::print(&state),
                 our_commit
-            );
+            )
+            .unwrap();
         }
     }
@@ -126,7 +127,7 @@ pub(crate) fn print_translation_stats(
         if state == &Some(LocalizationState::UpToDate) {
             continue;
         }
-        state_map.show(*state, ref_language, be_verbose);
+        state_map.show(*state, ref_language, be_verbose, &mut std::io::stdout());
     }

     println!(
@@ -144,14 +145,15 @@ pub(crate) fn print_translation_stats(
     );
 }

-pub(crate) fn print_csv_file(state_map: &LocalizationAnalysis) {
-    println!("country_code,file_name,translation_code,status,git_commit");
+pub(crate) fn print_csv_stats<W: std::io::Write>(state_map: &LocalizationAnalysis, output: &mut W) {
+    writeln!(
+        output,
+        "country_code,file_name,translation_key,status,git_commit"
+    )
+    .unwrap();
     for state in &ALL_LOCALIZATION_STATES {
-        if state == &Some(LocalizationState::UpToDate) {
-            continue;
-        }
-        state_map.csv(*state);
+        state_map.csv(*state, output);
     }
 }
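The thread running through the stats.rs changes is the generic `W: std::io::Write` parameter: the same helpers now print to stdout for interactive runs or into translation_analysis.csv for the CI upload, and the CSV path no longer skips UpToDate entries, so the database receives every state. A self-contained sketch of the pattern (row shape and names here are illustrative):

```rust
use std::io::Write;

/// Emit rows as CSV into any sink. Accepting `W: Write` instead of
/// hard-coding `println!` lets one function serve stdout, files, and
/// in-memory buffers alike.
fn print_csv_stats<W: Write>(rows: &[(&str, &str, &str)], output: &mut W) {
    writeln!(output, "country_code,translation_key,status").unwrap();
    for (code, key, status) in rows {
        writeln!(output, "{},{},{}", code, key, status).unwrap();
    }
}

fn main() {
    let rows = [("de_DE", "hud.map.title", "Outdated")];

    // Interactive run: stdout is the sink.
    print_csv_stats(&rows, &mut std::io::stdout());

    // CI run: write the file that the job later ingests with `\copy`.
    let mut f = std::fs::File::create("translation_analysis.csv")
        .expect("couldn't write csv file");
    print_csv_stats(&rows, &mut f);

    // Tests can capture the output in a Vec<u8> and assert on it.
    let mut buf: Vec<u8> = Vec::new();
    print_csv_stats(&rows, &mut buf);
    assert!(String::from_utf8(buf).unwrap().starts_with("country_code"));
}
```

Writing to a `&mut W` rather than returning a `String` also means large reports stream straight to their destination instead of being buffered whole in memory.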