Mirror of https://gitlab.com/veloren/veloren.git, synced 2024-08-30 18:12:32 +00:00
move translation check to separate ci step
note: TAGUUID is used to avoid SQL injection through a maliciously chosen branch name
parent 8be625c606
commit ee704e171d
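The TAGUUID note above refers to PostgreSQL dollar quoting: the branch name is spliced into upload.sql, so a branch named to contain SQL text could otherwise break out of an ordinary quoted literal. Wrapping the value as $<random tag>$...$<random tag>$ means the literal only ends at a tag an attacker cannot know in advance. The following is a minimal sketch of that idea, not part of the commit; the branch name is an invented example.

#!/bin/bash
# Minimal sketch (not from the commit) of the dollar-quoting mitigation used by the
# translation CI job diffed below. A PostgreSQL dollar-quoted string $TAG$...$TAG$
# ends only at the exact closing tag, so a random tag that an attacker cannot predict
# keeps a hostile branch name from terminating the string and injecting SQL.

# Random 16-character tag; the leading "Z" keeps it from starting with a digit,
# since dollar-quote tags follow identifier rules.
TAGUUID="Z$(tr -dc 'a-zA-Z0-9' < /dev/urandom | head -c 16)"

# Invented example of an attacker-controlled branch name.
CI_COMMIT_REF_NAME='evil"; DROP TABLE translations; --'

# Same construction as in the job: $Z...$<branch>$Z...$
echo "SET veloren.branch = \$${TAGUUID}\$${CI_COMMIT_REF_NAME}\$${TAGUUID}\$;"
# The output remains a single quoted literal, because the branch name cannot
# contain the unknown closing tag.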
@@ -11,6 +11,25 @@ unittests:
  retry:
    max: 2

translation:
  extends: .release
  stage: build
  image: registry.gitlab.com/veloren/veloren-docker-ci/cache/quality:${CACHE_IMAGE_TAG}
  script:
    - ln -s /dockercache/target target
    - cat ./.gitlab/scripts/translation.sh
    - source ./.gitlab/scripts/translation.sh
    - TAGUUID="Z$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 16 | head -n 1)" || echo "ignore this returncode, dont ask me why, it works"
    - echo $TAGUUID # Use TAGUUID to mitigate https://xkcd.com/327/ in the branch name
    - echo 'SET veloren.timestamp = "'"$(git show --no-patch --no-notes --pretty='%cd' HEAD)"'";' > upload.sql
    - echo "SET veloren.branch = \$${TAGUUID}\$${CI_COMMIT_REF_NAME}\$${TAGUUID}\$;" >> upload.sql
    - echo "SET veloren.sha = \$${TAGUUID}\$${CI_COMMIT_SHA}\$${TAGUUID}\$;" >> upload.sql
    - echo '\copy translations ("country_code", "file_name", "translation_key", "status", "git_commit") from '"'translation_analysis.csv' csv header" >> upload.sql
    - cat upload.sql
    - PGPASSWORD="${CIDBPASSWORD}" PGSSLROOTCERT="./.gitlab/ci-db.crt" psql "sslmode=verify-ca host=grafana.veloren.net port=15432 dbname=translations" -U hgseehzjtsrghtjdcqw -f upload.sql;
  retry:
    max: 2

benchmarks:
  extends: .release
  stage: build
@@ -23,7 +42,7 @@ benchmarks:
    - cat ./.gitlab/scripts/benchmark.sh
    - source ./.gitlab/scripts/benchmark.sh
    - TAGUUID="Z$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 16 | head -n 1)" || echo "ignore this returncode, dont ask me why, it works"
    - echo $TAGUUID
    - echo $TAGUUID # Use TAGUUID to mitigate https://xkcd.com/327/ in the branch name
    - echo 'SET veloren.timestamp = "'"$(git show --no-patch --no-notes --pretty='%cd' HEAD)"'";' > upload.sql
    - echo "SET veloren.branch = \$${TAGUUID}\$${CI_COMMIT_REF_NAME}\$${TAGUUID}\$;" >> upload.sql
    - echo "SET veloren.sha = \$${TAGUUID}\$${CI_COMMIT_SHA}\$${TAGUUID}\$;" >> upload.sql
.gitlab/scripts/translation.sh (new executable file, 4 lines)
@@ -0,0 +1,4 @@
#!/bin/bash
export VELOREN_ASSETS="$(pwd)/assets"
rm -r target/debug/incremental/veloren_* || echo "all good" # TMP FIX FOR 2021-03-22-nightly
time cargo test --package veloren-voxygen-i18n --lib test_all_localizations -- --nocapture --ignored
@@ -1,7 +1,6 @@
#!/bin/bash
export VELOREN_ASSETS="$(pwd)/assets"
rm -r target/debug/incremental/veloren_* || echo "all good" # TMP FIX FOR 2021-03-22-nightly
time cargo test --package veloren-voxygen-i18n --lib test_all_localizations -- --nocapture --ignored &&
time cargo test --package veloren-common-assets asset_tweak::tests --features asset_tweak --lib &&
( rm -r target/debug/incremental* || echo "all good" ) && # TMP FIX FOR 2021-03-22-nightly
time cargo test
@@ -5,7 +5,7 @@ use crate::{
    path::{BasePath, LangPath},
    raw::{self, RawFragment, RawLanguage},
    stats::{
        print_csv_file, print_overall_stats, print_translation_stats, LocalizationAnalysis,
        print_csv_stats, print_overall_stats, print_translation_stats, LocalizationAnalysis,
        LocalizationStats,
    },
    REFERENCE_LANG,
@@ -167,12 +167,10 @@ fn gather_results(
                Some(LocalizationState::Unused) => stats.unused_entries += 1,
                Some(LocalizationState::UpToDate) => stats.uptodate_entries += 1,
            };
            if entry.state != Some(LocalizationState::UpToDate) {
                let state_keys = state_map.data.get_mut(&entry.state).expect("prefiled");
                state_keys.push((file.clone(), key.to_owned(), entry.commit_id));
            }
        }
    }

    for (_, entries) in state_map.data.iter_mut() {
        entries.sort();
@@ -209,10 +207,13 @@ pub fn test_specific_localizations(
        analysis.insert(language_identifier.to_owned(), (state_map, stats));
    }

    let output = path.root_path().join("translation_analysis.csv");
    let mut f = std::fs::File::create(output).expect("couldn't write csv file");

    //printing
    for (language_identifier, (state_map, stats)) in &analysis {
        if csv_enabled {
            print_csv_file(state_map);
            print_csv_stats(state_map, &mut f);
        } else {
            print_translation_stats(
                language_identifier,
@@ -355,6 +355,6 @@ mod tests {
    fn test_all_localizations() {
        // Generate paths
        let root_dir = common_assets::find_root().expect("Failed to discover repository root");
        crate::analysis::test_all_localizations(&BasePath::new(&root_dir), true, false);
        crate::analysis::test_all_localizations(&BasePath::new(&root_dir), true, true);
    }
}
@@ -109,6 +109,8 @@ fn recursive_fragments_paths_in_language(
    subfolder: &Path,
    result: &mut Vec<PathBuf>,
) -> Result<(), std::io::Error> {
    let manifest_path = PathBuf::from(&format!("{}.{}", LANG_MANIFEST_FILE, LANG_EXTENSION));
    let template_path = PathBuf::from(&format!("{}.{}", "template", LANG_EXTENSION));
    let search_dir = lpath.sub_path(subfolder);
    for fragment_file in search_dir.read_dir()?.flatten() {
        let file_type = fragment_file.file_type()?;
@@ -117,7 +119,8 @@ fn recursive_fragments_paths_in_language(
        if file_type.is_dir() {
            recursive_fragments_paths_in_language(lpath, relative_path, result)?;
        } else if file_type.is_file()
            && relative_path != Path::new(&format!("{}.{}", LANG_MANIFEST_FILE, LANG_EXTENSION))
            && relative_path != manifest_path
            && relative_path != template_path
        {
            result.push(relative_path.to_path_buf());
        }
@@ -31,22 +31,21 @@ impl LocalizationStats {
impl LocalizationAnalysis {
    pub(crate) fn new(language_identifier: &str) -> Self {
        let mut data = HashMap::new();
        data.insert(Some(LocalizationState::UpToDate), vec![]);
        data.insert(Some(LocalizationState::NotFound), vec![]);
        data.insert(Some(LocalizationState::Unused), vec![]);
        data.insert(Some(LocalizationState::Outdated), vec![]);
        data.insert(None, vec![]);
        for key in ALL_LOCALIZATION_STATES.iter() {
            data.insert(*key, vec![]);
        }
        Self {
            language_identifier: language_identifier.to_owned(),
            data,
        }
    }

    fn show(
    fn show<W: std::io::Write>(
        &self,
        state: Option<LocalizationState>,
        ref_language: &RawLanguage<LocalizationEntryState>,
        be_verbose: bool,
        output: &mut W,
    ) {
        let entries = self.data.get(&state).unwrap_or_else(|| {
            panic!(
@@ -57,7 +56,7 @@ impl LocalizationAnalysis {
        if entries.is_empty() {
            return;
        }
        println!("\n\t[{}]", LocalizationState::print(&state));
        writeln!(output, "\n\t[{}]", LocalizationState::print(&state)).unwrap();
        for (path, key, commit_id) in entries {
            if be_verbose {
                let our_commit = LocalizationAnalysis::print_commit(commit_id);
@@ -68,28 +67,30 @@ impl LocalizationAnalysis {
                    .and_then(|s| s.commit_id)
                    .map(|s| format!("{}", s))
                    .unwrap_or_else(|| "None".to_owned());
                println!("{:60}| {:40} | {:40}", key, our_commit, ref_commit,);
                writeln!(output, "{:60}| {:40} | {:40}", key, our_commit, ref_commit).unwrap();
            } else {
                println!("{}", key);
                writeln!(output, "{}", key).unwrap();
            }
        }
    }

    fn csv(&self, state: Option<LocalizationState>) {
    fn csv<W: std::io::Write>(&self, state: Option<LocalizationState>, output: &mut W) {
        let entries = self
            .data
            .get(&state)
            .unwrap_or_else(|| panic!("called on invalid state: {:?}", state));
        for (path, key, commit_id) in entries {
            let our_commit = LocalizationAnalysis::print_commit(commit_id);
            println!(
            writeln!(
                output,
                "{},{:?},{},{},{}",
                self.language_identifier,
                path,
                key,
                LocalizationState::print(&state),
                our_commit
            );
            )
            .unwrap();
        }
    }
@@ -126,7 +127,7 @@ pub(crate) fn print_translation_stats(
        if state == &Some(LocalizationState::UpToDate) {
            continue;
        }
        state_map.show(*state, ref_language, be_verbose);
        state_map.show(*state, ref_language, be_verbose, &mut std::io::stdout());
    }

    println!(
@@ -144,14 +145,15 @@ pub(crate) fn print_translation_stats(
    );
}

pub(crate) fn print_csv_file(state_map: &LocalizationAnalysis) {
    println!("country_code,file_name,translation_code,status,git_commit");
pub(crate) fn print_csv_stats<W: std::io::Write>(state_map: &LocalizationAnalysis, output: &mut W) {
    writeln!(
        output,
        "country_code,file_name,translation_key,status,git_commit"
    )
    .unwrap();

    for state in &ALL_LOCALIZATION_STATES {
        if state == &Some(LocalizationState::UpToDate) {
            continue;
        }
        state_map.csv(*state);
        state_map.csv(*state, output);
    }
}