import{s as ve,ae as Ks,af as ge,f as a,l as g,a as p,g as o,h as qs,m as w,d as t,c as r,D as c,j as k,k as Hs,i as l,r as b}from"./scheduler.2f9f1739.js";import{S as be,i as _e,b as Ms,d as Ps,m as Es,a as Rs,t as Ss,e as js}from"./index.f9f1dac6.js";import{g as Ae,a as we}from"./styles.a749dd55.js";import{M as Ce}from"./MarkdownLayout.50a9a114.js";import{L as Ds}from"./link.4e908ad9.js";function ke(m){let n;return{c(){n=g("devcanvas")},l(i){n=w(i,"devcanvas")},m(i,F){l(i,n,F)},d(i){i&&t(n)}}}function $e(m){let n;return{c(){n=g("here")},l(i){n=w(i,"here")},m(i,F){l(i,n,F)},d(i){i&&t(n)}}}function Te(m){let n;return{c(){n=g("websiteplanet.com/webtools/robots-txt")},l(i){n=w(i,"websiteplanet.com/webtools/robots-txt")},m(i,F){l(i,n,F)},d(i){i&&t(n)}}}function Le(m){let n,i,F,$,y,u,ss="I then come to realize my website needs the <code>sitemap.xml</code> so Google can index them faster, and also a <code>robots.txt</code>. These makes the discovery and indexing process faster and more efficient.",ts,T,Qs="My first thought was we can’t have TXT files or XML files as route in Sveltekit, turned out I was thinking the wrong way and the solution is much more simpler.",ls,L,Xs="I thought It would be cool to share other like me, this method to step up your SEO game.",ns,q,Ys="So, the first thing we’re going to tackle is the <code>robots.txt</code> file, this file tells the search engine crawlers which part of your site it should be crawled or not. A <code>robots.txt</code> file is used primarily to manage crawler traffic to your site, and <em>usually</em> to keep a file off Google.",as,H,Ns="The way we’re going to create this <code>robots.txt</code> file for our Sveltekit website is by using endpoints. This will allow us to return a text response when ever the route is requested.",os,M,Ws="<p>Create a <code>robots.txt</code> folder in your routes, and place the <code>+server.js</code> inside of it, so it looks something like this - <code>routes/robots.txt/+server.js</code></p>",ps,f,Js=`<code><span class="line"><span style="color: #6B737C">/** </span><span style="color: #F97583">@type</span><span style="color: #6B737C"> </span><span style="color: #B392F0">{import('./$types').RequestHandler}</span><span style="color: #6B737C"> */</span></span> <span class="line"><span style="color: #F97583">export</span><span style="color: #B392F0"> </span><span style="color: #F97583">async</span><span style="color: #B392F0"> </span><span style="color: #F97583">function</span><span style="color: #B392F0"> GET({ url }) {</span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #F97583">return</span><span style="color: #B392F0"> </span><span style="color: #F97583">new</span><span style="color: #B392F0"> Response(</span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #FFAB70">\`</span></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70">User-agent: *</span></span> <span class="line"><span style="color: #FFAB70">Allow: /</span></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70"># Google adsbot ignores robots.txt unless specifically named!</span></span> <span class="line"><span style="color: #FFAB70">User-agent: AdsBot-Google</span></span> <span class="line"><span style="color: #FFAB70">Allow: /</span></span> <span class="line"></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70">User-agent: GPTBot</span></span> <span class="line"><span style="color: #FFAB70">Disallow: /</span></span> <span class="line"><span style="color: #FFAB70"> </span></span> <span class="line"><span style="color: #FFAB70"> \`</span><span style="color: #B392F0">.trim()</span></span> <span class="line"><span style="color: #B392F0"> );</span></span> <span class="line"><span style="color: #B392F0">}</span></span></code>`,rs,_,Is,A,Os,is,P,Vs="Having the <code>robots.txt</code> file alone does not mean the search engine crawler understands your website, instead it more like a signal to allow or disallow search engine crawlers to crawl or not part of our website. Also, we will have to update our robots.txt with our sitemap when we create it.",cs,E,Zs="As you can see in the <code>robots.txt</code>, we’re are blocking the <code>GPTBot</code> from crawling our website, it probably does not have much effect, but it good to have.",ys,V,x,Gs,C,Us,Z,se="robots.txt",zs,Fs,R,ee="Sitemap;",us,S,te="A sitemap enable search engine crawlers to find pages that are present in your website, which changed and when, so it can index your site accordingly.",ms,j,le="A sitemap is structured in an <code>XML</code> format, and defines, or just as the name suggest, provide the search engine with a map of your website, allowing search engine crawlers to find pages faster and more efficiently.",fs,I,ne="<p>Create a <code>sitemap.xml</code> folder in your routes, and place the <code>+server.js</code> inside of it, so it looks something like this - <code>routes/sitemap.xml/+server.js</code></p>",Bs,B,ae=`<code><span class="line"><span style="color: #F97583">export</span><span style="color: #B392F0"> </span><span style="color: #F97583">async</span><span style="color: #B392F0"> </span><span style="color: #F97583">function</span><span style="color: #B392F0"> GET() {</span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #F97583">const</span><span style="color: #B392F0"> </span><span style="color: #79B8FF">xml</span><span style="color: #B392F0"> </span><span style="color: #F97583">=</span><span style="color: #B392F0"> </span><span style="color: #FFAB70">\`</span></span> <span class="line"><span style="color: #FFAB70"><?xml version="1.0" encoding="UTF-8" ?></span></span> <span class="line"><span style="color: #FFAB70"><urlset</span></span> <span class="line"><span style="color: #FFAB70"> xmlns="https://www.sitemaps.org/schemas/sitemap/0.9"</span></span> <span class="line"><span style="color: #FFAB70"> xmlns:xhtml="https://www.w3.org/1999/xhtml"</span></span> <span class="line"><span style="color: #FFAB70"> xmlns:mobile="https://www.google.com/schemas/sitemap-mobile/1.0"</span></span> <span class="line"><span style="color: #FFAB70"> xmlns:news="https://www.google.com/schemas/sitemap-news/0.9"</span></span> <span class="line"><span style="color: #FFAB70"> xmlns:image="https://www.google.com/schemas/sitemap-image/1.1"</span></span> <span class="line"><span style="color: #FFAB70"> xmlns:video="https://www.google.com/schemas/sitemap-video/1.1"</span></span> <span class="line"><span style="color: #FFAB70">></span></span> <span class="line"><span style="color: #FFAB70"><url></span></span> <span class="line"><span style="color: #FFAB70"> <loc>https://yaqeen.me</loc></span></span> <span class="line"><span style="color: #FFAB70"></url></span></span> <span class="line"><span style="color: #FFAB70"><url></span></span> <span class="line"><span style="color: #FFAB70"> <loc>https://yaqeen.me/about</loc></span></span> <span class="line"><span style="color: #FFAB70"> <lastmod>2024-01-17</lastmod></span></span> <span class="line"><span style="color: #FFAB70"></url></span></span> <span class="line"><span style="color: #FFAB70"><url></span></span> <span class="line"><span style="color: #FFAB70"> <loc>https://yaqeen.me/blog</loc></span></span> <span class="line"><span style="color: #FFAB70"></url></span></span> <span class="line"><span style="color: #FFAB70"><url></span></span> <span class="line"><span style="color: #FFAB70"> <loc>https://yaqeen.me/projects</loc></span></span> <span class="line"><span style="color: #FFAB70"> <lastmod>2024-01-17</lastmod></span></span> <span class="line"><span style="color: #FFAB70"></url></span></span> <span class="line"><span style="color: #FFAB70"><url></span></span> <span class="line"><span style="color: #FFAB70"> <loc>https://yaqeen.me/wallpapers</loc></span></span> <span class="line"><span style="color: #FFAB70"> <lastmod>2024-01-17</lastmod></span></span> <span class="line"><span style="color: #FFAB70"></url></span></span> <span class="line"><span style="color: #FFAB70"></urlset></span></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70">\`</span><span style="color: #B392F0">.trim();</span></span> <span class="line"></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #F97583">return</span><span style="color: #B392F0"> </span><span style="color: #F97583">new</span><span style="color: #B392F0"> Response(xml</span><span style="color: #BBBBBB">,</span><span style="color: #B392F0"> {</span></span> <span class="line"><span style="color: #B392F0"> headers</span><span style="color: #F97583">:</span><span style="color: #B392F0"> {</span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #FFAB70">'Content-Type'</span><span style="color: #F97583">:</span><span style="color: #B392F0"> </span><span style="color: #FFAB70">'application/xml'</span></span> <span class="line"><span style="color: #B392F0"> }</span></span> <span class="line"><span style="color: #B392F0"> });</span></span> <span class="line"><span style="color: #B392F0">}</span></span></code>`,ds,O,oe="This is what a simple sitemap looks like, it basically the URLs in your website.",hs,G,pe="<p>Note we’re are returning a response with a Content-Type of <code>application/xml</code>. It is very important so your response won’t be returned a plain text.</p>",gs,U,re="We can then finally update our robots.txt file to point to our sitemap.",ws,d,ie=`<code><span class="line"><span style="color: #6B737C">/** </span><span style="color: #F97583">@type</span><span style="color: #6B737C"> </span><span style="color: #B392F0">{import('./$types').RequestHandler}</span><span style="color: #6B737C"> */</span></span> <span class="line"><span style="color: #F97583">export</span><span style="color: #B392F0"> </span><span style="color: #F97583">async</span><span style="color: #B392F0"> </span><span style="color: #F97583">function</span><span style="color: #B392F0"> GET({ url }) {</span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #F97583">return</span><span style="color: #B392F0"> </span><span style="color: #F97583">new</span><span style="color: #B392F0"> Response(</span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #FFAB70">\`</span></span> <span class="line"><span style="color: #FFAB70"> User-agent: *</span></span> <span class="line"><span style="color: #FFAB70"> Allow: /</span></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70"> # Google adsbot ignores robots.txt unless specifically named!</span></span> <span class="line"><span style="color: #FFAB70"> User-agent: AdsBot-Google</span></span> <span class="line"><span style="color: #FFAB70"> Allow: /</span></span> <span class="line"></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70"> User-agent: GPTBot</span></span> <span class="line"><span style="color: #FFAB70"> Disallow: /</span></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70"> Sitemap: </span><span style="color: #F97583">\${</span><span style="color: #79B8FF">url</span><span style="color: #B392F0">.origin</span><span style="color: #F97583">}</span><span style="color: #FFAB70">/sitemap.xml</span></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70"> \`</span><span style="color: #B392F0">.trim()</span></span> <span class="line"><span style="color: #B392F0"> );</span></span> <span class="line"><span style="color: #B392F0">}</span></span></code>`,xs,z,ce="Notice that we’re using <code>url.origin</code>, this is just to make it easier for us when we’re updating maybe our domain or we’re in dev mode, we don’t have to manage that.",vs,D,ye="Tip;",bs,K,Fe="In my website, I have a blog, and I need to be able to dynamically update the sitemap. I think most of us can agree it no easy task to do this manually for all your pages.",_s,Q,ue="One trick I’m using here is, in my sitemap:",As,X,me="<li>Fetch all the blog posts.</li> <li>use the map function to automatically add the blog URLs to the XML.</li>",Cs,Y,fe="<strong>Example;</strong>",ks,h,Be=`<code><span class="line"><span style="color: #F97583">export</span><span style="color: #B392F0"> </span><span style="color: #F97583">async</span><span style="color: #B392F0"> </span><span style="color: #F97583">function</span><span style="color: #B392F0"> GET({ fetch</span><span style="color: #BBBBBB">,</span><span style="color: #B392F0"> url }) {</span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #F97583">const</span><span style="color: #B392F0"> </span><span style="color: #79B8FF">response</span><span style="color: #B392F0"> </span><span style="color: #F97583">=</span><span style="color: #B392F0"> </span><span style="color: #F97583">await</span><span style="color: #B392F0"> fetch(</span><span style="color: #FFAB70">'blog/get/posts/all'</span><span style="color: #B392F0">);</span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #F97583">const</span><span style="color: #B392F0"> </span><span style="color: #79B8FF">posts</span><span style="color: #B392F0"> </span><span style="color: #F97583">=</span><span style="color: #B392F0"> </span><span style="color: #F97583">await</span><span style="color: #B392F0"> </span><span style="color: #79B8FF">response</span><span style="color: #B392F0">.json();</span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #F97583">const</span><span style="color: #B392F0"> </span><span style="color: #79B8FF">xml</span><span style="color: #B392F0"> </span><span style="color: #F97583">=</span><span style="color: #B392F0"> </span><span style="color: #FFAB70">\`</span></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70"> <!-- Rest of the site map --></span></span> <span class="line"><span style="color: #FFAB70"> <url></span></span> <span class="line"><span style="color: #FFAB70"> <loc>https://yaqeen.me</loc></span></span> <span class="line"><span style="color: #FFAB70"> </url></span></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70"> <!-- Rest of the site map --></span></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70"> </span><span style="color: #F97583">\${</span><span style="color: #79B8FF">posts</span><span style="color: #B392F0">.map(post) </span><span style="color: #F97583">=></span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #FFAB70">\`<url></span></span> <span class="line"><span style="color: #FFAB70"> <loc></span><span style="color: #F97583">\${</span><span style="color: #79B8FF">url</span><span style="color: #B392F0">.origin</span><span style="color: #F97583">}</span><span style="color: #FFAB70">/blog/</span><span style="color: #F97583">\${</span><span style="color: #79B8FF">post</span><span style="color: #B392F0">.slug</span><span style="color: #F97583">}</span><span style="color: #FFAB70"></loc></span></span> <span class="line"><span style="color: #FFAB70"> <lastmod></span><span style="color: #F97583">\${</span><span style="color: #79B8FF">post</span><span style="color: #B392F0">?.lastmod</span><span style="color: #F97583">}</span><span style="color: #FFAB70"></lastmod></span></span> <span class="line"><span style="color: #FFAB70"> </url>\`</span><span style="color: #B392F0">.join(</span><span style="color: #FFAB70">''</span><span style="color: #B392F0">)</span><span style="color: #F97583">}</span></span> <span class="line"></span> <span class="line"><span style="color: #FFAB70"> </urlset>\`</span><span style="color: #B392F0">.trim();</span></span> <span class="line"></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #F97583">return</span><span style="color: #B392F0"> </span><span style="color: #F97583">new</span><span style="color: #B392F0"> Response(xml</span><span style="color: #BBBBBB">,</span><span style="color: #B392F0"> {</span></span> <span class="line"><span style="color: #B392F0"> headers</span><span style="color: #F97583">:</span><span style="color: #B392F0"> {</span></span> <span class="line"><span style="color: #B392F0"> </span><span style="color: #FFAB70">'Content-Type'</span><span style="color: #F97583">:</span><span style="color: #B392F0"> </span><span style="color: #FFAB70">'application/xml'</span></span> <span class="line"><span style="color: #B392F0"> }</span></span> <span class="line"><span style="color: #B392F0"> });</span></span> <span class="line"><span style="color: #B392F0">}</span></span></code>`,$s,N,de="I use this method on this blog and it works like magic. You could use similar method for your websites as well.",Ts,W,he="Stay super awesome ✌️",Ls;return F=new Ds({props:{href:"http://devcanvas.art/blog",rel:"nofollow",$$slots:{default:[ke]},$$scope:{ctx:m}}}),A=new Ds({props:{href:"https://developers.google.com/search/docs/crawling-indexing/robots/create-robots-txt",rel:"nofollow",$$slots:{default:[$e]},$$scope:{ctx:m}}}),C=new Ds({props:{href:"https://websiteplanet.com/webtools/robots-txt",rel:"nofollow",$$slots:{default:[Te]},$$scope:{ctx:m}}}),{c(){n=a("p"),i=g("Recently I’ve been putting a lot of work into SEO for my blog and also that of "),Ms(F.$$.fragment),$=g(", but I then realized a big problem, most of my pages are not even on google yet. This is indeed a very huge problem."),y=p(),u=a("p"),u.innerHTML=ss,ts=p(),T=a("p"),T.textContent=Qs,ls=p(),L=a("p"),L.textContent=Xs,ns=p(),q=a("p"),q.innerHTML=Ys,as=p(),H=a("p"),H.innerHTML=Ns,os=p(),M=a("blockquote"),M.innerHTML=Ws,ps=p(),f=a("pre"),f.innerHTML=Js,rs=p(),_=a("p"),Is=g("You can follow the steps "),Ms(A.$$.fragment),Os=g(" to submit your robots.txt file. Though it does not make any difference as the search engine will automatically pick it up."),is=p(),P=a("p"),P.innerHTML=Vs,cs=p(),E=a("p"),E.innerHTML=Zs,ys=p(),V=a("blockquote"),x=a("p"),Gs=g("Thanks to one of our readers for letting me know about this tool "),Ms(C.$$.fragment),Us=g(" for verifying your "),Z=a("code"),Z.textContent=se,zs=g(" file."),Fs=p(),R=a("h3"),R.textContent=ee,us=p(),S=a("p"),S.textContent=te,ms=p(),j=a("p"),j.innerHTML=le,fs=p(),I=a("blockquote"),I.innerHTML=ne,Bs=p(),B=a("pre"),B.innerHTML=ae,ds=p(),O=a("p"),O.textContent=oe,hs=p(),G=a("blockquote"),G.innerHTML=pe,gs=p(),U=a("p"),U.textContent=re,ws=p(),d=a("pre"),d.innerHTML=ie,xs=p(),z=a("p"),z.innerHTML=ce,vs=p(),D=a("h3"),D.textContent=ye,bs=p(),K=a("p"),K.textContent=Fe,_s=p(),Q=a("p"),Q.textContent=ue,As=p(),X=a("ol"),X.innerHTML=me,Cs=p(),Y=a("p"),Y.innerHTML=fe,ks=p(),h=a("pre"),h.innerHTML=Be,$s=p(),N=a("p"),N.textContent=de,Ts=p(),W=a("h4"),W.textContent=he,this.h()},l(s){n=o(s,"P",{});var e=qs(n);i=w(e,"Recently I’ve been putting a lot of work into SEO for my blog and also that of "),Ps(F.$$.fragment,e),$=w(e,", but I then realized a big problem, most of my pages are not even on google yet. This is indeed a very huge problem."),e.forEach(t),y=r(s),u=o(s,"P",{"data-svelte-h":!0}),c(u)!=="svelte-1c2qnq7"&&(u.innerHTML=ss),ts=r(s),T=o(s,"P",{"data-svelte-h":!0}),c(T)!=="svelte-1266fuc"&&(T.textContent=Qs),ls=r(s),L=o(s,"P",{"data-svelte-h":!0}),c(L)!=="svelte-1df9qao"&&(L.textContent=Xs),ns=r(s),q=o(s,"P",{"data-svelte-h":!0}),c(q)!=="svelte-1jw4oei"&&(q.innerHTML=Ys),as=r(s),H=o(s,"P",{"data-svelte-h":!0}),c(H)!=="svelte-s3quvm"&&(H.innerHTML=Ns),os=r(s),M=o(s,"BLOCKQUOTE",{"data-svelte-h":!0}),c(M)!=="svelte-44v8xj"&&(M.innerHTML=Ws),ps=r(s),f=o(s,"PRE",{class:!0,style:!0,tabindex:!0,"data-svelte-h":!0}),c(f)!=="svelte-n3o3m9"&&(f.innerHTML=Js),rs=r(s),_=o(s,"P",{});var J=qs(_);Is=w(J,"You can follow the steps "),Ps(A.$$.fragment,J),Os=w(J," to submit your robots.txt file. Though it does not make any difference as the search engine will automatically pick it up."),J.forEach(t),is=r(s),P=o(s,"P",{"data-svelte-h":!0}),c(P)!=="svelte-1j1vun8"&&(P.innerHTML=Vs),cs=r(s),E=o(s,"P",{"data-svelte-h":!0}),c(E)!=="svelte-s8c5vt"&&(E.innerHTML=Zs),ys=r(s),V=o(s,"BLOCKQUOTE",{});var es=qs(V);x=o(es,"P",{});var v=qs(x);Gs=w(v,"Thanks to one of our readers for letting me know about this tool "),Ps(C.$$.fragment,v),Us=w(v," for verifying your "),Z=o(v,"CODE",{"data-svelte-h":!0}),c(Z)!=="svelte-1cnji8r"&&(Z.textContent=se),zs=w(v," file."),v.forEach(t),es.forEach(t),Fs=r(s),R=o(s,"H3",{"data-svelte-h":!0}),c(R)!=="svelte-l70qtk"&&(R.textContent=ee),us=r(s),S=o(s,"P",{"data-svelte-h":!0}),c(S)!=="svelte-f9llid"&&(S.textContent=te),ms=r(s),j=o(s,"P",{"data-svelte-h":!0}),c(j)!=="svelte-2iyf2b"&&(j.innerHTML=le),fs=r(s),I=o(s,"BLOCKQUOTE",{"data-svelte-h":!0}),c(I)!=="svelte-11v71lv"&&(I.innerHTML=ne),Bs=r(s),B=o(s,"PRE",{class:!0,style:!0,tabindex:!0,"data-svelte-h":!0}),c(B)!=="svelte-1kr8wql"&&(B.innerHTML=ae),ds=r(s),O=o(s,"P",{"data-svelte-h":!0}),c(O)!=="svelte-x6ogt6"&&(O.textContent=oe),hs=r(s),G=o(s,"BLOCKQUOTE",{"data-svelte-h":!0}),c(G)!=="svelte-1feulia"&&(G.innerHTML=pe),gs=r(s),U=o(s,"P",{"data-svelte-h":!0}),c(U)!=="svelte-v7dl43"&&(U.textContent=re),ws=r(s),d=o(s,"PRE",{class:!0,style:!0,tabindex:!0,"data-svelte-h":!0}),c(d)!=="svelte-zoyy4n"&&(d.innerHTML=ie),xs=r(s),z=o(s,"P",{"data-svelte-h":!0}),c(z)!=="svelte-1e5fiq0"&&(z.innerHTML=ce),vs=r(s),D=o(s,"H3",{"data-svelte-h":!0}),c(D)!=="svelte-ei5niq"&&(D.textContent=ye),bs=r(s),K=o(s,"P",{"data-svelte-h":!0}),c(K)!=="svelte-un8axb"&&(K.textContent=Fe),_s=r(s),Q=o(s,"P",{"data-svelte-h":!0}),c(Q)!=="svelte-125gcy6"&&(Q.textContent=ue),As=r(s),X=o(s,"OL",{"data-svelte-h":!0}),c(X)!=="svelte-z7yaog"&&(X.innerHTML=me),Cs=r(s),Y=o(s,"P",{"data-svelte-h":!0}),c(Y)!=="svelte-1u7kpxw"&&(Y.innerHTML=fe),ks=r(s),h=o(s,"PRE",{class:!0,style:!0,tabindex:!0,"data-svelte-h":!0}),c(h)!=="svelte-fqh4hz"&&(h.innerHTML=Be),$s=r(s),N=o(s,"P",{"data-svelte-h":!0}),c(N)!=="svelte-1dhc16d"&&(N.textContent=de),Ts=r(s),W=o(s,"H4",{"data-svelte-h":!0}),c(W)!=="svelte-1t8p4yc"&&(W.textContent=he),this.h()},h(){k(f,"class","shiki min-dark"),Hs(f,"background-color","#1f1f1f"),k(f,"tabindex","0"),k(B,"class","shiki min-dark"),Hs(B,"background-color","#1f1f1f"),k(B,"tabindex","0"),k(d,"class","shiki min-dark"),Hs(d,"background-color","#1f1f1f"),k(d,"tabindex","0"),k(h,"class","shiki min-dark"),Hs(h,"background-color","#1f1f1f"),k(h,"tabindex","0")},m(s,e){l(s,n,e),b(n,i),Es(F,n,null),b(n,$),l(s,y,e),l(s,u,e),l(s,ts,e),l(s,T,e),l(s,ls,e),l(s,L,e),l(s,ns,e),l(s,q,e),l(s,as,e),l(s,H,e),l(s,os,e),l(s,M,e),l(s,ps,e),l(s,f,e),l(s,rs,e),l(s,_,e),b(_,Is),Es(A,_,null),b(_,Os),l(s,is,e),l(s,P,e),l(s,cs,e),l(s,E,e),l(s,ys,e),l(s,V,e),b(V,x),b(x,Gs),Es(C,x,null),b(x,Us),b(x,Z),b(x,zs),l(s,Fs,e),l(s,R,e),l(s,us,e),l(s,S,e),l(s,ms,e),l(s,j,e),l(s,fs,e),l(s,I,e),l(s,Bs,e),l(s,B,e),l(s,ds,e),l(s,O,e),l(s,hs,e),l(s,G,e),l(s,gs,e),l(s,U,e),l(s,ws,e),l(s,d,e),l(s,xs,e),l(s,z,e),l(s,vs,e),l(s,D,e),l(s,bs,e),l(s,K,e),l(s,_s,e),l(s,Q,e),l(s,As,e),l(s,X,e),l(s,Cs,e),l(s,Y,e),l(s,ks,e),l(s,h,e),l(s,$s,e),l(s,N,e),l(s,Ts,e),l(s,W,e),Ls=!0},p(s,e){const J={};e&2&&(J.$$scope={dirty:e,ctx:s}),F.$set(J);const es={};e&2&&(es.$$scope={dirty:e,ctx:s}),A.$set(es);const v={};e&2&&(v.$$scope={dirty:e,ctx:s}),C.$set(v)},i(s){Ls||(Rs(F.$$.fragment,s),Rs(A.$$.fragment,s),Rs(C.$$.fragment,s),Ls=!0)},o(s){Ss(F.$$.fragment,s),Ss(A.$$.fragment,s),Ss(C.$$.fragment,s),Ls=!1},d(s){s&&(t(n),t(y),t(u),t(ts),t(T),t(ls),t(L),t(ns),t(q),t(as),t(H),t(os),t(M),t(ps),t(f),t(rs),t(_),t(is),t(P),t(cs),t(E),t(ys),t(V),t(Fs),t(R),t(us),t(S),t(ms),t(j),t(fs),t(I),t(Bs),t(B),t(ds),t(O),t(hs),t(G),t(gs),t(U),t(ws),t(d),t(xs),t(z),t(vs),t(D),t(bs),t(K),t(_s),t(Q),t(As),t(X),t(Cs),t(Y),t(ks),t(h),t($s),t(N),t(Ts),t(W)),js(F),js(A),js(C)}}}function qe(m){let n,i;const F=[m[0],xe];let $={$$slots:{default:[Le]},$$scope:{ctx:m}};for(let y=0;y<F.length;y+=1)$=Ks($,F[y]);return n=new Ce({props:$}),{c(){Ms(n.$$.fragment)},l(y){Ps(n.$$.fragment,y)},m(y,u){Es(n,y,u),i=!0},p(y,[u]){const ss=u&1?Ae(F,[u&1&&we(y[0]),u&0&&we(xe)]):{};u&2&&(ss.$$scope={dirty:u,ctx:y}),n.$set(ss)},i(y){i||(Rs(n.$$.fragment,y),i=!0)},o(y){Ss(n.$$.fragment,y),i=!1},d(y){js(n,y)}}}const xe={title:"Simple SEO optimization for your Sveltekit website",description:"So, the first thing we’re going to tackle is the robots.txt file, this file tells the search engine crawlers which part of your site it should be crawled or not. A robots.txt file is used primarily to manage crawler traffic to your site, and usually to keep a file off Google.",date:"2024-02-04",lastmod:"2024-05-21",categories:["sveltekit","tips","seo"],visual:!1,published:!0};function He(m,n,i){return m.$$set=F=>{i(0,n=Ks(Ks({},n),ge(F)))},n=ge(n),[n]}class je extends be{constructor(n){super(),_e(this,n,He,qe,ve,{})}}export{je as default,xe as metadata};