Avoid using a route that is heavily targeted by bots.

This commit is contained in:
Sergiotarxz 2025-01-10 02:22:04 +01:00
parent 8d1a412535
commit dd3663bdff
4 changed files with 5 additions and 5 deletions

View File

@ -356,7 +356,7 @@ function onSearchChange() {
+ "//"
+ window.location.hostname
+ port
+ '/search');
+ '/search.html');
url.searchParams.set('q', query);
url.searchParams.set('e', 1);
fetch(url).then(async (res) => {
@ -364,7 +364,7 @@ function onSearchChange() {
+ "//"
+ window.location.hostname
+ port
+ '/search');
+ '/search.html');
url.searchParams.set('q', query);
history.pushState({}, '', url);
searchResults.innerHTML = await res.text();

View File

@ -57,7 +57,7 @@ sub startup ($self) {
$r->get('/product/:slug')->to('Product#direct_buy');
$r->get('/producto/:slug')->to('Product#direct_buy');
$r->get('/search.json')->to('Search#search');
$r->get('/search')->to('Search#search_user');
$r->get('/search.html')->to('Search#search_user');
$r->get('/farmacia-guardia.json')->to('FarmaciaGuardia#current');
$r->get('/<:category>.rss')->to('Page#category_rss');
$r->get('/:category_slug/atributo/<:attribute_slug>-preview.png')->to('Attribute#get_attribute_preview');

View File

@ -99,7 +99,7 @@ sub _append_attributes_category_sitemap( $self, $dom, $category_key,
sub _generate_url_for_search ( $self, $query ) {
my $url_tag = Mojo::DOM->new_tag('url');
my $base_url = $self->config('base_url');
my $url_resource = Mojo::URL->new("$base_url/search");
my $url_resource = Mojo::URL->new("$base_url/search.html");
$url_resource->query( q => $query );
my $location_tag = Mojo::DOM->new_tag( loc => $url_resource );
$url_tag->child_nodes->first->append_content($location_tag);

File diff suppressed because one or more lines are too long