<?xml version="1.0" encoding="UTF-8"?>
<!-- generator="FeedCreator 1.8" -->
<?xml-stylesheet href="http://wiki.legido.com/lib/exe/css.php?s=feed" type="text/css"?>
<rdf:RDF
    xmlns="http://purl.org/rss/1.0/"
    xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
    xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
    xmlns:dc="http://purl.org/dc/elements/1.1/">
    <channel rdf:about="http://wiki.legido.com/feed.php">
        <title>Legido Wiki - informatica:inteligencia_artificial</title>
        <description></description>
        <link>http://wiki.legido.com/</link>
        <image rdf:resource="http://wiki.legido.com/lib/exe/fetch.php?media=wiki:dokuwiki.svg" />
        <dc:date>2026-04-30T19:36:13+00:00</dc:date>
        <items>
            <rdf:Seq>
                <rdf:li rdf:resource="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:agentes&amp;rev=1764921595&amp;do=diff"/>
                <rdf:li rdf:resource="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:agentes_mac&amp;rev=1764922131&amp;do=diff"/>
                <rdf:li rdf:resource="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:beto&amp;rev=1677710577&amp;do=diff"/>
                <rdf:li rdf:resource="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:chatgpt&amp;rev=1682509753&amp;do=diff"/>
                <rdf:li rdf:resource="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:falcon&amp;rev=1698091869&amp;do=diff"/>
                <rdf:li rdf:resource="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:jukebox&amp;rev=1674814628&amp;do=diff"/>
                <rdf:li rdf:resource="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:openai&amp;rev=1677798204&amp;do=diff"/>
                <rdf:li rdf:resource="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:webs&amp;rev=1695141158&amp;do=diff"/>
            </rdf:Seq>
        </items>
    </channel>
    <image rdf:about="http://wiki.legido.com/lib/exe/fetch.php?media=wiki:dokuwiki.svg">
        <title>Legido Wiki</title>
        <link>http://wiki.legido.com/</link>
        <url>http://wiki.legido.com/lib/exe/fetch.php?media=wiki:dokuwiki.svg</url>
    </image>
    <item rdf:about="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:agentes&amp;rev=1764921595&amp;do=diff">
        <dc:format>text/html</dc:format>
        <dc:date>2025-12-05T07:59:55+00:00</dc:date>
        <dc:creator>Anonymous (anonymous@undisclosed.example.com)</dc:creator>
        <title>agentes</title>
        <link>http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:agentes&amp;rev=1764921595&amp;do=diff</link>
        <description>Agente copilot
npm install -g @github/copilot
copilot
ollama
DIR_OLLAMA=/media/jose/2Tb/ia/ollama
docker run --name ollama -v $DIR_OLLAMA/scrics:/scrics -v $DIR_OLLAMA/usr_local_lib:/usr/local/lib -v $DIR_OLLAMA/root_ollama:/root/.ollama -ti debian
MAC ponemos mas memoria al docker:</description>
    </item>
    <item rdf:about="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:agentes_mac&amp;rev=1764922131&amp;do=diff">
        <dc:format>text/html</dc:format>
        <dc:date>2025-12-05T08:08:51+00:00</dc:date>
        <dc:creator>Anonymous (anonymous@undisclosed.example.com)</dc:creator>
        <title>agentes_mac</title>
        <link>http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:agentes_mac&amp;rev=1764922131&amp;do=diff</link>
        <description>Agente ollama MAC
export DIR_OLLAMA=/Users/T054810/ia/ollama


docker run -d \
  --name ollama \
  -p 11434:11434 \
  --memory=&quot;16g&quot; \
  -v $DIR_OLLAMA/root_ollama:/root/.ollama \
  ollama/ollama</description>
    </item>
    <item rdf:about="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:beto&amp;rev=1677710577&amp;do=diff">
        <dc:format>text/html</dc:format>
        <dc:date>2023-03-01T22:42:57+00:00</dc:date>
        <dc:creator>Anonymous (anonymous@undisclosed.example.com)</dc:creator>
        <title>beto</title>
        <link>http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:beto&amp;rev=1677710577&amp;do=diff</link>
        <description>Fuente: &lt;https://colab.research.google.com/github/mrm8488/shared_colab_notebooks/blob/master/Using_Spanish_BERT_fine_tuned_for_Q%26A_pipelines.ipynb#scrollTo=2CdQh0Psl3U8&gt;

Repositorio: &lt;https://github.com/dccuchile/beto&gt;

Instalación
apt-get install python3 python3-pip ipython3
pip3 install transformers torch torchvision
ipython3
from transformers import *

nlp = pipeline(
    &#039;question-answering&#039;, 
    model=&#039;mrm8488/distill-bert-base-spanish-wwm-cased-finetuned-spa-squad2-es&#039;,
    tokenizer=(
…</description>
    </item>
    <item rdf:about="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:chatgpt&amp;rev=1682509753&amp;do=diff">
        <dc:format>text/html</dc:format>
        <dc:date>2023-04-26T11:49:13+00:00</dc:date>
        <dc:creator>Anonymous (anonymous@undisclosed.example.com)</dc:creator>
        <title>chatgpt</title>
        <link>http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:chatgpt&amp;rev=1682509753&amp;do=diff</link>
        <description>El modelo 7B ocupa 30Gb

Le he dado 2 veces, la primera baja 13Gb y la segunda baja los layers y acaba con los 30Gb
 node  resultado  14  se instala y sale el modelo en la web pero no responde preguntas 16.13  se instala y sale el modelo en la web pero no responde preguntas</description>
    </item>
    <item rdf:about="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:falcon&amp;rev=1698091869&amp;do=diff">
        <dc:format>text/html</dc:format>
        <dc:date>2023-10-23T20:11:09+00:00</dc:date>
        <dc:creator>Anonymous (anonymous@undisclosed.example.com)</dc:creator>
        <title>falcon</title>
        <link>http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:falcon&amp;rev=1698091869&amp;do=diff</link>
        <description>Falcon 40B
pip install torch --no-cache-dir
pip install accelerate
pip install transformers

from transformers import AutoTokenizer, AutoModelForCausalLM
import transformers
import torch

model = &quot;tiiuae/falcon-40b&quot;

tokenizer = AutoTokenizer.from_pretrained(model)
pipeline = transformers.pipeline(
    &quot;text-generation&quot;,
    model=model,
    tokenizer=tokenizer,
    torch_dtype=torch.bfloat16,
    device_map=&quot;auto&quot;,
)
sequences = pipeline(
   &quot;Inventate una historia de un asesinato en una casa u…</description>
    </item>
    <item rdf:about="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:jukebox&amp;rev=1674814628&amp;do=diff">
        <dc:format>text/html</dc:format>
        <dc:date>2023-01-27T10:17:08+00:00</dc:date>
        <dc:creator>Anonymous (anonymous@undisclosed.example.com)</dc:creator>
        <title>jukebox</title>
        <link>http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:jukebox&amp;rev=1674814628&amp;do=diff</link>
        <description>Hacer música con inteligencia artificial

Fuente &lt;https://github.com/openai/jukebox&gt;

Primero instalar conda


apt-get update
apt-get install -y python wget git python3-soundfile
adduser jukebox
su - jukebox
wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh

sed -i &#039;s/^\#\!\/bin\/sh/\#\!\/bin\/bash/g&#039; Miniconda3-latest-Linux-x86_64.sh 

chmod +x Miniconda3-latest-Linux-x86_64.sh 
./Miniconda3-latest-Linux-x86_64.sh</description>
    </item>
    <item rdf:about="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:openai&amp;rev=1677798204&amp;do=diff">
        <dc:format>text/html</dc:format>
        <dc:date>2023-03-02T23:03:24+00:00</dc:date>
        <dc:creator>Anonymous (anonymous@undisclosed.example.com)</dc:creator>
        <title>openai</title>
        <link>http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:openai&amp;rev=1677798204&amp;do=diff</link>
        <description>Openai


curl https://api.openai.com/v1/completions \
  -H &quot;Content-Type: application/json&quot; \
  -H &quot;Authorization: Bearer $OPENAI_API_KEY&quot; \
  -d &#039;{
  &quot;model&quot;: &quot;text-davinci-003&quot;,
  &quot;prompt&quot;: &quot;The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly.\n\nHuman: Hello, who are you?\nAI: I am an AI created by OpenAI. How can I help you today?\nHuman: I&#039;d like to cancel my subscription.\nAI:&quot;,
  &quot;temperature&quot;: 0.9,
  &quot;max_tokens&quot;: 150,
  &quot;to…</description>
    </item>
    <item rdf:about="http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:webs&amp;rev=1695141158&amp;do=diff">
        <dc:format>text/html</dc:format>
        <dc:date>2023-09-19T16:32:38+00:00</dc:date>
        <dc:creator>Anonymous (anonymous@undisclosed.example.com)</dc:creator>
        <title>webs</title>
        <link>http://wiki.legido.com/doku.php?id=informatica:inteligencia_artificial:webs&amp;rev=1695141158&amp;do=diff</link>
        <description>Mundos en 3D

&lt;https://skybox.blockadelabs.com/&gt;</description>
    </item>
</rdf:RDF>
