mediawiki/extensions/LinkedWiki: main (log #1182594)

sourcepatches

This run took 12 seconds.

From a8f063e5bb8c51f51ced5f0fdf6beeeae9d067a4 Mon Sep 17 00:00:00 2001
From: libraryupgrader <tools.libraryupgrader@tools.wmflabs.org>
Date: Sun, 17 Mar 2024 19:52:54 +0000
Subject: [PATCH] build: Updating mediawiki/mediawiki-codesniffer to 43.0.0

The following sniffs now pass and were enabled:
* MediaWiki.AlternativeSyntax.AlternativeSyntax.AlternativeSyntax

Change-Id: I8b8b49ae67871ba5f9d11e85d042714697220727
---
 .phpcs.xml                         |  1 -
 LinkedWikiStatus.php               |  2 +-
 composer.json                      |  7 ++-
 job/InvalidatePageWithQueryJob.php | 11 ++--
 lua/LinkedWikiLuaLibrary.php       |  2 +-
 parser/SparqlParser.php            |  2 +-
 specialpages/SpecialRDFSave.php    | 86 +++++++++++++++---------------
 specialpages/SpecialRDFUnit.php    | 40 +++++++-------
 tag/RDFTag.php                     |  6 +--
 9 files changed, 81 insertions(+), 76 deletions(-)

diff --git a/.phpcs.xml b/.phpcs.xml
index ab0ec1c..325cd26 100644
--- a/.phpcs.xml
+++ b/.phpcs.xml
@@ -1,7 +1,6 @@
 <?xml version="1.0"?>
 <ruleset>
 	<rule ref="./vendor/mediawiki/mediawiki-codesniffer/MediaWiki">
-		<exclude name="MediaWiki.AlternativeSyntax.AlternativeSyntax.AlternativeSyntax" />
 		<exclude name="MediaWiki.Commenting.FunctionComment.MissingDocumentationPrivate" />
 		<exclude name="MediaWiki.Commenting.PropertyDocumentation.MissingDocumentationPrivate" />
 		<exclude name="MediaWiki.Commenting.PropertyDocumentation.MissingVar" />
diff --git a/LinkedWikiStatus.php b/LinkedWikiStatus.php
index 2359e21..9be00e5 100644
--- a/LinkedWikiStatus.php
+++ b/LinkedWikiStatus.php
@@ -392,7 +392,7 @@ class LinkedWikiStatus {
 				'page_props'
 			],
 			[
-				'page_id','page_title'
+				'page_id', 'page_title'
 			],
 			[
 				'pp_propname' => self::PAGEPROP_WRITER_TAG,
diff --git a/composer.json b/composer.json
index 938a4bd..4d141df 100644
--- a/composer.json
+++ b/composer.json
@@ -13,7 +13,7 @@
 		}
 	],
 	"require-dev": {
-		"mediawiki/mediawiki-codesniffer": "41.0.0",
+		"mediawiki/mediawiki-codesniffer": "43.0.0",
 		"mediawiki/minus-x": "1.1.1",
 		"php-parallel-lint/php-console-highlighter": "1.0.0",
 		"php-parallel-lint/php-parallel-lint": "1.3.2"
@@ -29,5 +29,10 @@
 			"phpcbf"
 		],
 		"phpcs": "phpcs -sp --cache"
+	},
+	"config": {
+		"allow-plugins": {
+			"dealerdirect/phpcodesniffer-composer-installer": true
+		}
 	}
 }
diff --git a/job/InvalidatePageWithQueryJob.php b/job/InvalidatePageWithQueryJob.php
index 6ecd6db..d9b84f7 100644
--- a/job/InvalidatePageWithQueryJob.php
+++ b/job/InvalidatePageWithQueryJob.php
@@ -50,16 +50,14 @@ class InvalidatePageWithQueryJob extends Job {
 							'pp_value' => true
 						],
 						LIST_AND
-					)
-					,
+					),
 					$dbr->makeList(
 						[
 							'pp_propname' => LinkedWikiStatus::PAGEPROP_READER_MODULE,
 							'pp_value' => true
 						],
 						LIST_AND
-					)
-					,
+					),
 					$dbr->makeList(
 						[
 							'pp_propname' => LinkedWikiStatus::PAGEPROP_WRITER_MODULE,
@@ -98,7 +96,7 @@ class InvalidatePageWithQueryJob extends Job {
 				'page_props'
 			],
 			[
-				'page_id','page_title'
+				'page_id', 'page_title'
 			],
 			[
 				$dbr->makeList( [
@@ -108,8 +106,7 @@ class InvalidatePageWithQueryJob extends Job {
 							'pp_value' => true
 						],
 						LIST_AND
-					)
-					,
+					),
 					$dbr->makeList(
 						[
 							'pp_propname' => LinkedWikiStatus::PAGEPROP_WRITER_MODULE,
diff --git a/lua/LinkedWikiLuaLibrary.php b/lua/LinkedWikiLuaLibrary.php
index dc4cc14..eccc800 100644
--- a/lua/LinkedWikiLuaLibrary.php
+++ b/lua/LinkedWikiLuaLibrary.php
@@ -116,7 +116,7 @@ class LinkedWikiLuaLibrary extends Scribunto_LuaLibraryBase {
 	 * ]
 	 */
 	public function setConfig( $urlConfig = null ) {
-		try{
+		try {
 			if ( empty( $this->objConfig ) ) {
 				$this->objConfig = new LinkedWikiConfig( $urlConfig );
 			} elseif ( $this->objConfig->getConfigEndpoint() !== $urlConfig ) {
diff --git a/parser/SparqlParser.php b/parser/SparqlParser.php
index da9ab69..4316eef 100644
--- a/parser/SparqlParser.php
+++ b/parser/SparqlParser.php
@@ -904,7 +904,7 @@ class SparqlParser {
 		$classHeaders = '',
 		$headers = '' ) {
 		// error Exception caught: Request URL not set when push a page with a query
-		try{
+		try {
 			$today = date( wfMessage( 'linkedwiki-date' )->text() );
 			if ( empty( $parser ) ) {
 				return $today . " -- " .
diff --git a/specialpages/SpecialRDFSave.php b/specialpages/SpecialRDFSave.php
index c590b6e..d8c1ef7 100644
--- a/specialpages/SpecialRDFSave.php
+++ b/specialpages/SpecialRDFSave.php
@@ -127,10 +127,10 @@ EOT
 			);
 
 			if ( !empty( $deleteData ) ) {
-				try{
+				try {
 					LinkedWikiStatus::clearJobsInDatabase();
 					$output->addHTML( LinkedWikiStatus::clearDefaultGraph() );
-				}catch ( Exception $e ) {
+				} catch ( Exception $e ) {
 					$output->addHTML(
 						"There are errors. You need to fix the problem before trying to refresh the wiki."
 					);
@@ -149,59 +149,59 @@ EOT
 		}
 		// show all pages
 		if ( !empty( $refreshWikiPage ) ) {
-				try{
-					LinkedWikiStatus::clearJobsInDatabase();
-					if ( !empty( $configDefaultSaveData ) ) {
-						$output->addHTML( LinkedWikiStatus::clearDefaultGraph() );
-					}
-					$output->addHTML( LinkedWikiStatus::invalidateAllPages() );
+			try {
+				LinkedWikiStatus::clearJobsInDatabase();
+				if ( !empty( $configDefaultSaveData ) ) {
+					$output->addHTML( LinkedWikiStatus::clearDefaultGraph() );
+				}
+				$output->addHTML( LinkedWikiStatus::invalidateAllPages() );
 
-					// phpcs:disable
-					$output->addHTML(
-						<<<EOT
+				// phpcs:disable
+				$output->addHTML(
+					<<<EOT
 <br/>When all the tasks are done, the wiki will be up to date.
 You can follow the number of jobs remaining by clicking on the button "Refresh status of jobs"
 EOT
-					);
-					// phpcs:enable
-				}catch ( Exception $e ) {
-					$output->addHTML(
-						"There are errors. You need to fix the problem before trying to refresh the wiki."
-					);
-					$output->addHTML( "<br/>Error: <pre>" . htmlentities( $e->getMessage() ) . "</pre>" );
-					$this->endSpecialPage();
-					return;
-				}
+				);
+				// phpcs:enable
+			} catch ( Exception $e ) {
+				$output->addHTML(
+					"There are errors. You need to fix the problem before trying to refresh the wiki."
+				);
+				$output->addHTML( "<br/>Error: <pre>" . htmlentities( $e->getMessage() ) . "</pre>" );
+				$this->endSpecialPage();
+				return;
+			}
 			// not lazyPush
 			$jobQueueGroup->push( new InvalidatePageWithQueryJob() );
 		}
 
 		if ( !empty( $refreshData ) ) {
-				try{
-					// save all RDF tags
-					if ( !empty( $configDefaultSaveData ) ) {
-						LinkedWikiStatus::clearJobsInDatabase();
-						$output->addHTML( LinkedWikiStatus::clearDefaultGraph() );
-						$output->addHTML( LinkedWikiStatus::loadAllTagsRDFInPage() );
-					}
-					$jobQueueGroup->lazyPush( new InvalidatePageWithQueryJob() );
-					// phpcs:disable
-					$output->addHTML(
-						<<<EOT
+			try {
+				// save all RDF tags
+				if ( !empty( $configDefaultSaveData ) ) {
+					LinkedWikiStatus::clearJobsInDatabase();
+					$output->addHTML( LinkedWikiStatus::clearDefaultGraph() );
+					$output->addHTML( LinkedWikiStatus::loadAllTagsRDFInPage() );
+				}
+				$jobQueueGroup->lazyPush( new InvalidatePageWithQueryJob() );
+				// phpcs:disable
+				$output->addHTML(
+					<<<EOT
 <br/>When all the tasks are done, the wiki will be up to date.
 You can follow the number of jobs remaining by clicking on the button "Refresh status of jobs"
 EOT
-					);
-					// phpcs:enable
+				);
+				// phpcs:enable
 
-				}catch ( Exception $e ) {
-					$output->addHTML(
-						"There are errors. You need to fix the problem before trying to refresh the RDF database."
-					);
-					$output->addHTML( "<br/>Error: <pre>" . htmlentities( $e->getMessage() ) . "</pre>" );
-					$this->endSpecialPage();
-					return;
-				}
+			} catch ( Exception $e ) {
+				$output->addHTML(
+					"There are errors. You need to fix the problem before trying to refresh the RDF database."
+				);
+				$output->addHTML( "<br/>Error: <pre>" . htmlentities( $e->getMessage() ) . "</pre>" );
+				$this->endSpecialPage();
+				return;
+			}
 
 			// not lazyPush
 			$jobQueueGroup->push( new InvalidatePageWithQueryJob() );
@@ -359,7 +359,7 @@ EOT;
 			],
 			[
 				'page_touched', 'page_links_updated', 'page_namespace', 'page_title',
-				'GROUP_CONCAT(pp_propname) as props','GROUP_CONCAT(job_id) as jobs'
+				'GROUP_CONCAT(pp_propname) as props', 'GROUP_CONCAT(job_id) as jobs'
 			],
 			[
 				$dbr->makeList( [
diff --git a/specialpages/SpecialRDFUnit.php b/specialpages/SpecialRDFUnit.php
index cbdb6b0..631d8e2 100644
--- a/specialpages/SpecialRDFUnit.php
+++ b/specialpages/SpecialRDFUnit.php
@@ -13,18 +13,18 @@
 
 use MediaWiki\MediaWikiServices;
 
-defined( 'HTTP_URL_REPLACE' ) || define( 'HTTP_URL_REPLACE',        0 );
-defined( 'HTTP_URL_JOIN_PATH' ) || define( 'HTTP_URL_JOIN_PATH',      1 );
-defined( 'HTTP_URL_JOIN_QUERY' ) || define( 'HTTP_URL_JOIN_QUERY',     2 );
-defined( 'HTTP_URL_STRIP_USER' ) || define( 'HTTP_URL_STRIP_USER',     4 );
-defined( 'HTTP_URL_STRIP_PASS' ) || define( 'HTTP_URL_STRIP_PASS',     8 );
-defined( 'HTTP_URL_STRIP_AUTH' ) || define( 'HTTP_URL_STRIP_AUTH',     12 );
-defined( 'HTTP_URL_STRIP_PORT' ) || define( 'HTTP_URL_STRIP_PORT',     32 );
-defined( 'HTTP_URL_STRIP_PATH' ) || define( 'HTTP_URL_STRIP_PATH',     64 );
-defined( 'HTTP_URL_STRIP_QUERY' ) || define( 'HTTP_URL_STRIP_QUERY',    128 );
+defined( 'HTTP_URL_REPLACE' ) || define( 'HTTP_URL_REPLACE', 0 );
+defined( 'HTTP_URL_JOIN_PATH' ) || define( 'HTTP_URL_JOIN_PATH', 1 );
+defined( 'HTTP_URL_JOIN_QUERY' ) || define( 'HTTP_URL_JOIN_QUERY', 2 );
+defined( 'HTTP_URL_STRIP_USER' ) || define( 'HTTP_URL_STRIP_USER', 4 );
+defined( 'HTTP_URL_STRIP_PASS' ) || define( 'HTTP_URL_STRIP_PASS', 8 );
+defined( 'HTTP_URL_STRIP_AUTH' ) || define( 'HTTP_URL_STRIP_AUTH', 12 );
+defined( 'HTTP_URL_STRIP_PORT' ) || define( 'HTTP_URL_STRIP_PORT', 32 );
+defined( 'HTTP_URL_STRIP_PATH' ) || define( 'HTTP_URL_STRIP_PATH', 64 );
+defined( 'HTTP_URL_STRIP_QUERY' ) || define( 'HTTP_URL_STRIP_QUERY', 128 );
 defined( 'HTTP_URL_STRIP_FRAGMENT' ) || define( 'HTTP_URL_STRIP_FRAGMENT', 256 );
-defined( 'HTTP_URL_STRIP_ALL' ) || define( 'HTTP_URL_STRIP_ALL',      492 );
-if ( !function_exists( 'http_build_str' ) ) :
+defined( 'HTTP_URL_STRIP_ALL' ) || define( 'HTTP_URL_STRIP_ALL', 492 );
+if ( !function_exists( 'http_build_str' ) ) {
 
 	/**
 	 * Build query string
@@ -54,9 +54,9 @@ if ( !function_exists( 'http_build_str' ) ) :
 		return implode( $arg_separator, $out );
 	}
 
-endif;
+}
 
-if ( !function_exists( 'http_build_url' ) ) :
+if ( !function_exists( 'http_build_url' ) ) {
 
 	/**
 	 * Build a URL
@@ -113,7 +113,8 @@ if ( !function_exists( 'http_build_url' ) ) :
 			$d_path = $defaults['path'];
 			$u_path = ( isset( $url['path'] ) ? $url['path'] : '' );
 			$p_path = ( isset( $parts['path'] ) ? $parts['path'] : '' );
-			if ( $p_path ) { $u_path = '';
+			if ( $p_path ) {
+				$u_path = '';
 			}
 			$path = $d_path;
 			if ( isset( $url['host'] ) && !$p_path ) {
@@ -142,7 +143,8 @@ if ( !function_exists( 'http_build_url' ) ) :
 				} elseif ( $v == '.' ) {
 					unset( $path[$k] );
 
-				} else { $k_stack[] = $k;
+				} else {
+					$k_stack[] = $k;
 				}
 			}
 			$path = implode( '/', $path );
@@ -159,9 +161,11 @@ if ( !function_exists( 'http_build_url' ) ) :
 			if ( is_array( $p_query ) ) {
 				$query = $u_query;
 			} elseif ( $JOIN_QUERY ) {
-				if ( !is_array( $u_query ) ) { parse_str( $u_query, $u_query );
+				if ( !is_array( $u_query ) ) {
+					parse_str( $u_query, $u_query );
 				}
-				if ( !is_array( $p_query ) ) { parse_str( $p_query, $p_query );
+				if ( !is_array( $p_query ) ) {
+					parse_str( $p_query, $p_query );
 				}
 				$u_query = http_build_str( $u_query );
 				$p_query = http_build_str( $p_query );
@@ -213,7 +217,7 @@ if ( !function_exists( 'http_build_url' ) ) :
 		return $out;
 	}
 
-endif;
+}
 
 class SpecialRDFUnit extends SpecialPage {
 
diff --git a/tag/RDFTag.php b/tag/RDFTag.php
index cf0d26b..f70f94b 100644
--- a/tag/RDFTag.php
+++ b/tag/RDFTag.php
@@ -76,9 +76,9 @@ class RDFTag {
 		}
 
 		// todo Clean ?
-		$parameters = [ "?subject","?type","?property" ];
+		$parameters = [ "?subject", "?type", "?property" ];
 		$iri = "<" . $IRISource . ">";
-		$values = [ $iri,$iri,$iri ];
+		$values = [ $iri, $iri, $iri ];
 		$text = str_replace( $parameters,
 			$values,
 			$textTemp );
@@ -133,7 +133,7 @@ class RDFTag {
 		// $shaclSchemasArrayIri = [];
 		// str_replace($wikiPage->getTitle()->getBaseText()
 
-		$badChar = [ ".","/"," " ];
+		$badChar = [ ".", "/", " " ];
 		$filename = '/tmp/' . str_replace( $badChar, "", $context->getTitle()->getDBKey() ) . '.ttl';
 		$commandRDFUnit = "rapper -i turtle \"" . $filename . "\"  ";
 		// check RDF
-- 
2.39.2
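
The only sniff newly enforced by this patch is MediaWiki.AlternativeSyntax.AlternativeSyntax.AlternativeSyntax, which forbids PHP's alternative control-structure syntax (a colon after the condition, closed by endif/endforeach/endwhile) in favour of braces; the rest of the churn is phpcbf auto-fixing other sniffs in the upgraded standard. A minimal sketch of the construct it targets, using a hypothetical function name rather than code from this extension:

<?php
// Rejected by the sniff: alternative syntax, opened with a colon and closed by endif.
if ( !function_exists( 'demo_build_str' ) ) :
	function demo_build_str( array $query ) {
		return http_build_query( $query );
	}
endif;

// Accepted: the brace form, the same rewrite applied to SpecialRDFUnit.php above.
if ( !function_exists( 'demo_build_str' ) ) {
	function demo_build_str( array $query ) {
		return http_build_query( $query );
	}
}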

$ date
--- stdout ---
Sun Mar 17 19:52:45 UTC 2024

--- end ---
$ git clone file:///srv/git/mediawiki-extensions-LinkedWiki.git repo --depth=1 -b master
--- stderr ---
Cloning into 'repo'...
--- stdout ---

--- end ---
$ git config user.name libraryupgrader
--- stdout ---

--- end ---
$ git config user.email tools.libraryupgrader@tools.wmflabs.org
--- stdout ---

--- end ---
$ git submodule update --init
--- stdout ---

--- end ---
$ grr init
--- stdout ---
Installed commit-msg hook.

--- end ---
$ git show-ref refs/heads/master
--- stdout ---
518161e5142cefb961dfce3d6835f9182bc9813f refs/heads/master

--- end ---
$ /usr/bin/npm audit --json
--- stdout ---
{
  "auditReportVersion": 2,
  "vulnerabilities": {},
  "metadata": {
    "vulnerabilities": {
      "info": 0,
      "low": 0,
      "moderate": 0,
      "high": 0,
      "critical": 0,
      "total": 0
    },
    "dependencies": {
      "prod": 6,
      "dev": 444,
      "optional": 1,
      "peer": 10,
      "peerOptional": 0,
      "total": 450
    }
  }
}

--- end ---
$ /usr/bin/composer install
--- stderr ---
No composer.lock file present. Updating dependencies to latest instead of installing from lock file. See https://getcomposer.org/install for more information.
Loading composer repositories with package information
Updating dependencies
Lock file operations: 20 installs, 0 updates, 0 removals
  - Locking bordercloud/sparql (dev-master 73915d8)
  - Locking composer/semver (3.3.2)
  - Locking composer/spdx-licenses (1.5.8)
  - Locking mediawiki/mediawiki-codesniffer (v41.0.0)
  - Locking mediawiki/minus-x (1.1.1)
  - Locking php-parallel-lint/php-console-color (v1.0.1)
  - Locking php-parallel-lint/php-console-highlighter (v1.0.0)
  - Locking php-parallel-lint/php-parallel-lint (v1.3.2)
  - Locking psr/container (2.0.2)
  - Locking squizlabs/php_codesniffer (3.7.2)
  - Locking symfony/console (v5.4.36)
  - Locking symfony/deprecation-contracts (v3.4.0)
  - Locking symfony/polyfill-ctype (v1.29.0)
  - Locking symfony/polyfill-intl-grapheme (v1.29.0)
  - Locking symfony/polyfill-intl-normalizer (v1.29.0)
  - Locking symfony/polyfill-mbstring (v1.29.0)
  - Locking symfony/polyfill-php73 (v1.29.0)
  - Locking symfony/polyfill-php80 (v1.29.0)
  - Locking symfony/service-contracts (v3.4.1)
  - Locking symfony/string (v6.4.4)
Writing lock file
Installing dependencies from lock file (including require-dev)
Package operations: 20 installs, 0 updates, 0 removals
  - Downloading bordercloud/sparql (dev-master 73915d8)
  - Installing bordercloud/sparql (dev-master 73915d8): Extracting archive
  - Installing symfony/polyfill-php80 (v1.29.0): Extracting archive
  - Installing squizlabs/php_codesniffer (3.7.2): Extracting archive
  - Installing symfony/polyfill-mbstring (v1.29.0): Extracting archive
  - Installing composer/spdx-licenses (1.5.8): Extracting archive
  - Installing composer/semver (3.3.2): Extracting archive
  - Installing mediawiki/mediawiki-codesniffer (v41.0.0): Extracting archive
  - Installing symfony/polyfill-intl-normalizer (v1.29.0): Extracting archive
  - Installing symfony/polyfill-intl-grapheme (v1.29.0): Extracting archive
  - Installing symfony/polyfill-ctype (v1.29.0): Extracting archive
  - Installing symfony/string (v6.4.4): Extracting archive
  - Installing psr/container (2.0.2): Extracting archive
  - Installing symfony/service-contracts (v3.4.1): Extracting archive
  - Installing symfony/polyfill-php73 (v1.29.0): Extracting archive
  - Installing symfony/deprecation-contracts (v3.4.0): Extracting archive
  - Installing symfony/console (v5.4.36): Extracting archive
  - Installing mediawiki/minus-x (1.1.1): Extracting archive
  - Installing php-parallel-lint/php-console-color (v1.0.1): Extracting archive
  - Installing php-parallel-lint/php-console-highlighter (v1.0.0): Extracting archive
  - Installing php-parallel-lint/php-parallel-lint (v1.3.2): Extracting archive
4 package suggestions were added by new dependencies, use `composer suggest` to see details.
Generating autoload files
14 packages you are using are looking for funding.
Use the `composer fund` command to find out more!
--- stdout ---

--- end ---
Upgrading c:mediawiki/mediawiki-codesniffer from 41.0.0 -> 43.0.0
$ /usr/bin/composer update
--- stderr ---
Loading composer repositories with package information
Updating dependencies
Lock file operations: 3 installs, 3 updates, 0 removals
  - Upgrading composer/semver (3.3.2 => 3.4.0)
  - Locking dealerdirect/phpcodesniffer-composer-installer (v1.0.0)
  - Upgrading mediawiki/mediawiki-codesniffer (v41.0.0 => v43.0.0)
  - Locking phpcsstandards/phpcsextra (1.1.2)
  - Locking phpcsstandards/phpcsutils (1.0.9)
  - Upgrading squizlabs/php_codesniffer (3.7.2 => 3.8.1)
Writing lock file
Installing dependencies from lock file (including require-dev)
Package operations: 3 installs, 3 updates, 0 removals
  - Upgrading squizlabs/php_codesniffer (3.7.2 => 3.8.1): Extracting archive
  - Installing dealerdirect/phpcodesniffer-composer-installer (v1.0.0): Extracting archive
  - Installing phpcsstandards/phpcsutils (1.0.9): Extracting archive
  - Installing phpcsstandards/phpcsextra (1.1.2): Extracting archive
  - Upgrading composer/semver (3.3.2 => 3.4.0): Extracting archive
  - Upgrading mediawiki/mediawiki-codesniffer (v41.0.0 => v43.0.0): Extracting archive
Generating autoload files
15 packages you are using are looking for funding.
Use the `composer fund` command to find out more!
No security vulnerability advisories found
--- stdout ---
PHP CodeSniffer Config installed_paths set to ../../mediawiki/mediawiki-codesniffer,../../phpcsstandards/phpcsextra,../../phpcsstandards/phpcsutils

--- end ---
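
Besides the codesniffer version bump, the composer.json change adds an allow-plugins entry for dealerdirect/phpcodesniffer-composer-installer: since Composer 2.2, plugins must be allowed explicitly before they can run, and it is this plugin that registers the PHP_CodeSniffer installed_paths shown above. Done by hand rather than through this patch, the equivalent would be something like:

    composer config allow-plugins.dealerdirect/phpcodesniffer-composer-installer true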
{'MediaWiki.Commenting.FunctionComment.MissingDocumentationPrivate', 'MediaWiki.Commenting.PropertyDocumentation.MissingVar', 'MediaWiki.AlternativeSyntax.AlternativeSyntax.AlternativeSyntax', 'MediaWiki.Usage.ForbiddenFunctions.exec', 'MediaWiki.Commenting.PropertyDocumentation.MissingDocumentationPrivate', 'MediaWiki.Usage.ExtendClassUsage.FunctionVarUsage'}
Tests fail!
$ vendor/bin/phpcbf
--- stdout ---

PHPCBF RESULT SUMMARY
----------------------------------------------------------------------
FILE                                                  FIXED  REMAINING
----------------------------------------------------------------------
/src/repo/tag/RDFTag.php                              6      1
/src/repo/specialpages/SpecialRDFUnit.php             18     2
/src/repo/job/InvalidatePageWithQueryJob.php          4      0
/src/repo/specialpages/SpecialRDFSave.php             17     2
/src/repo/LinkedWikiStatus.php                        1      1
/src/repo/lua/LinkedWikiLuaLibrary.php                1      0
/src/repo/parser/SparqlParser.php                     1      1
----------------------------------------------------------------------
A TOTAL OF 48 ERRORS WERE FIXED IN 7 FILES
----------------------------------------------------------------------

Time: 637ms; Memory: 6MB



--- end ---
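
In the summary above, FIXED counts the violations phpcbf corrected automatically and REMAINING counts those it could not fix on its own. To see what is left in a given file, something like the following works, where -s makes phpcs print the sniff code behind each message:

    vendor/bin/phpcs -s specialpages/SpecialRDFUnit.php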
$ git checkout .phpcs.xml
--- stderr ---
Updated 1 path from the index
--- stdout ---

--- end ---
$ /usr/bin/composer install
--- stderr ---
Installing dependencies from lock file (including require-dev)
Verifying lock file contents can be installed on current platform.
Nothing to install, update or remove
Generating autoload files
15 packages you are using are looking for funding.
Use the `composer fund` command to find out more!
--- stdout ---

--- end ---
$ /usr/bin/composer test
--- stderr ---
> parallel-lint . --exclude vendor --exclude node_modules
> minus-x check .
> phpcs -sp --cache
--- stdout ---
PHP 8.2.7 | 10 parallel jobs
..........................                                   26/26 (100 %)


Checked 26 files in 0.1 seconds
No syntax error found
MinusX
======
Processing /src/repo...
.............................................................
.............................................................
............................................................
All good!
.......................... 26 / 26 (100%)


Time: 183ms; Memory: 8MB


--- end ---
$ /usr/bin/npm audit --json
--- stdout ---
{
  "auditReportVersion": 2,
  "vulnerabilities": {},
  "metadata": {
    "vulnerabilities": {
      "info": 0,
      "low": 0,
      "moderate": 0,
      "high": 0,
      "critical": 0,
      "total": 0
    },
    "dependencies": {
      "prod": 6,
      "dev": 444,
      "optional": 1,
      "peer": 10,
      "peerOptional": 0,
      "total": 450
    }
  }
}

--- end ---
Traceback (most recent call last):
  File "/venv/lib/python3.11/site-packages/runner-0.1.0-py3.11.egg/runner/__init__.py", line 541, in fix_eslint_config
    data = gf.parse_section('eslint')
           ^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/venv/lib/python3.11/site-packages/runner-0.1.0-py3.11.egg/runner/grunt.py", line 124, in parse_section
    return self._inner_parse(base.group(1).splitlines()[1:])
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/venv/lib/python3.11/site-packages/runner-0.1.0-py3.11.egg/runner/grunt.py", line 176, in _inner_parse
    data[key] = self._inner_parse(lines[index + 1:index + subindex])
                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/venv/lib/python3.11/site-packages/runner-0.1.0-py3.11.egg/runner/grunt.py", line 205, in _inner_parse
    raise RuntimeError
RuntimeError

$ package-lock-lint package-lock.json
--- stdout ---
Checking package-lock.json

--- end ---
build: Updating mediawiki/mediawiki-codesniffer to 43.0.0

The following sniffs now pass and were enabled:
* MediaWiki.AlternativeSyntax.AlternativeSyntax.AlternativeSyntax

$ git add .
--- stdout ---

--- end ---
$ git commit -F /tmp/tmp6q6w0fzk
--- stdout ---
[master a8f063e] build: Updating mediawiki/mediawiki-codesniffer to 43.0.0
 9 files changed, 81 insertions(+), 76 deletions(-)

--- end ---
Source code is licensed under the AGPL.