[{"data":1,"prerenderedAt":572},["ShallowReactive",2],{"footer-primary":3,"footer-secondary":93,"footer-description":119,"tv-quick-connect":121,"tv-quick-connect-seasons":132,"tv-quick-connect-episodes":152,"sales-reps":320},{"items":4},[5,29,49,69],{"id":6,"title":7,"url":8,"page":8,"children":9},"522e608a-77b0-4333-820d-d4f44be2ade1","Solutions",null,[10,15,20,25],{"id":11,"title":12,"url":8,"page":13},"fcafe85a-a798-4710-9e7a-776fe413aae5","Headless CMS",{"permalink":14},"/solutions/headless-cms",{"id":16,"title":17,"url":8,"page":18},"79972923-93cf-4777-9e32-5c9b0315fc10","Backend-as-a-Service",{"permalink":19},"/solutions/backend-as-a-service",{"id":21,"title":22,"url":8,"page":23},"0fa8d0c1-7b64-4f6f-939d-d7fdb99fc407","Product Information",{"permalink":24},"/solutions/product-information-management",{"id":26,"title":27,"url":28,"page":8},"63946d54-6052-4780-8ff4-91f5a9931dcc","100+ Things to Build","https://directus.io/blog/100-tools-apps-and-platforms-you-can-build-with-directus",{"id":30,"title":31,"url":8,"page":8,"children":32},"8ab4f9b1-f3e2-44d6-919b-011d91fe072f","Resources",[33,37,41,45],{"id":34,"title":35,"url":36,"page":8},"f951fb84-8777-4b84-9e91-996fe9d25483","Documentation","https://docs.directus.io",{"id":38,"title":39,"url":40,"page":8},"366febc7-a538-4c08-a326-e6204957f1e3","Guides","https://docs.directus.io/guides/",{"id":42,"title":43,"url":44,"page":8},"aeb9128e-1c5f-417f-863c-2449416433cd","Community","https://directus.chat",{"id":46,"title":47,"url":48,"page":8},"da1c2ed8-0a77-49b0-a903-49c56cb07de5","Release Notes","https://github.com/directus/directus/releases",{"id":50,"title":51,"url":8,"page":8,"children":52},"d61fae8c-7502-494a-822f-19ecff3d0256","Support",[53,57,61,65],{"id":54,"title":55,"url":56,"page":8},"8c43c781-7ebd-475f-a931-747e293c0a88","Issue Tracker","https://github.com/directus/directus/issues",{"id":58,"title":59,"url":60,"page":8},"d77bb78e-cf7b-4e01-932a-514414ba49d3","Feature Requests","https://github.com/directus/directus/discussions?discussions_q=is:open+sort:top",{"id":62,"title":63,"url":64,"page":8},"4346be2b-2c53-476e-b53b-becacec626a6","Community Chat","https://discord.com/channels/725371605378924594/741317677397704757",{"id":66,"title":67,"url":68,"page":8},"26c115d2-49f7-4edc-935e-d37d427fb89d","Cloud Dashboard","https://directus.cloud",{"id":70,"title":71,"url":8,"page":8,"children":72},"49141403-4f20-44ac-8453-25ace1265812","Organization",[73,78,84,88],{"id":74,"title":75,"url":76,"page":77},"1f36ea92-8a5e-47c8-914c-9822a8b9538a","About","/about",{"permalink":76},{"id":79,"title":80,"url":81,"page":82},"b84bf525-5471-4b14-a93c-225f6c386005","Careers","#",{"permalink":83},"/careers",{"id":85,"title":86,"url":87,"page":8},"86aabc3a-433d-434b-9efa-ad1d34be0a34","Brand Assets","https://drive.google.com/drive/folders/1lBOTba4RaA5ikqOn8Ewo4RYzD0XcymG9?usp=sharing",{"id":89,"title":90,"url":8,"page":91},"8d2fa1e3-198e-4405-81e1-2ceb858bc237","Contact",{"permalink":92},"/contact",{"items":94},[95,101,107,113],{"id":96,"title":97,"url":8,"page":98,"children":100},"8a1b7bfa-429d-4ffc-a650-2a5fdcf356da","Cloud Policies",{"permalink":99},"/cloud-policies",[],{"id":102,"title":103,"url":81,"page":104,"children":106},"bea848ef-828f-4306-8017-6b00ec5d4a0c","License",{"permalink":105},"/bsl",[],{"id":108,"title":109,"url":81,"page":110,"children":112},"4e914f47-4bee-42b7-b445-3119ee4196ef","Terms",{"permalink":111},"/terms",[],{"id":114,"title":115,"url":81,"page":116,"children":118},"ea69eda6-d317-4981-8421-fcabb1826bfd","Privacy",{"permalink":117},"/privacy",[],{"description":120},"\u003Cp>A composable backend to build your Headless CMS, BaaS, and more.&nbsp;\u003C/p>",{"id":122,"title":123,"logo":124,"cover":125,"tile":126,"announcement_text":8,"description":127,"slug":128,"one_liner":129,"card_text":8,"status":130,"sort":131},"dcc3b569-c1c2-4aa0-a011-476cca3c24bc","Quick Connect","a67e7fee-ae02-4fb8-8d73-fe615b01ac43","adbaf7ab-2d65-4b0b-bc2a-50f011cc08f5","1171b046-491e-4cfb-a68c-527b89c2c348","Each episode of ‘Quick Connect’ is a mini-masterclass, showcasing the power of integrating third-party services into your Directus project. Whether it's crafting a custom workflow or adding a unique extension, we'll guide you through the process in easy-to-follow steps.","quick-connect","Learn how to integrate third-party services with Directus in just ten minutes.","published",7,[133,139],{"id":134,"number":135,"show":122,"year":136,"episodes":137},"cf7a056d-fa10-4bc5-8cc3-c2b9ef59b684",2,"2024",[138],"979db4da-a870-4120-94ee-bd80789f411c",{"id":140,"number":141,"show":122,"year":142,"episodes":143},"3b8b7d34-a0fb-4ea6-85ff-2b5bfbb8e0b6",1,"2023",[144,145,146,147,148,149,150,151],"502dcf7e-c23e-4dfd-b147-65f5abaea5c7","a230c9ef-8db4-4c00-a0cb-9524f7934eb0","5f41dc16-29b7-485f-a6e1-081c3f1acc4f","81417d25-26d2-4f05-be37-7ced51a0594e","8f933ee9-4e4f-4e35-8c1f-e99ad0684bfa","71e081db-92f8-4978-b020-7d2460a46187","8e47020d-bd5a-43a7-bca9-54af4f5d465d","bfb8bc25-ef1b-4544-b50d-402008c638a1",[153,175,196,218,235,257,274,290,306],{"id":144,"slug":154,"vimeo_id":155,"description":156,"tile":157,"length":131,"resources":158,"people":162,"episode_number":141,"published":166,"title":167,"video_transcript_html":168,"video_transcript_text":169,"content":8,"seo":8,"status":130,"episode_people":170,"recommendations":172,"season":173},"vonage","895915734","Build a prompt which allows customer notifications via text message using the Vonage SMS API","a73e5c66-b217-469a-b76b-a5e13d72f8d6",[159],{"name":160,"url":161},"Vonage SMS API Docs","https://developer.vonage.com/en/messaging/sms/overview",[163],{"name":164,"url":165},"Kevin Lewis","https://directus.io/team/kevin-lewis","2023-12-22","Sending SMS Messages with Vonage","\u003Cp>Speaker 0: Vonage is a cloud communication platform that lets you integrate messaging, voice, and video into your applications. And today on Quick Connect, we're going to set up a system that will allow us to send messages directly to users from an item page. So let's get started. On the Vonage side, you're going to need a couple of things. Firstly, you're going to need a Vonage API account.\u003C/p>\u003Cp>And with that account, you will get an API key and an API secret, which we're going to need in a moment. You're also going to need a phone number from the Vonage programmable numbers product line. So you can go ahead, grab a new number straight here in the dashboard, and make sure that that number has the SMS feature enabled. Now that's everything we need over here, so take note of your API key, secret, and the phone number, and then head over to your director's project. Before we build out this connection to Vonage, I wanna just take a note that I've created this orders collection.\u003C/p>\u003Cp>And in that orders collection, there is a phone field. That will be the user that that will be the phone number that receives the our text message. Let's go and create a new flow here, and we will call this send SMS. And we'll go ahead and set this based on a manual trigger. We'll do it on just the orders collection, and it will require a confirmation dialogue.\u003C/p>\u003Cp>So this is like a pop up box that can collect more information before triggering the flow so we'll call this one message text text we'll make it a text box and hit save. So we'll get to see what that confirmation dialogue does. Let's go back to our order, and we see here in the sidebar, there's now a send SMS button. This is the confirmation dialogue. So let's write a message, hello, world, bedtime lucky world, and we'll run flow on current item.\u003C/p>\u003Cp>Let's go back to the flow and make sure that's worked. We see over here, there's a one on the log, So let's drop that down, and we get to see here in the payload is, inside of the body of the payload, the text that we entered in the box and the ID of the order. So now we're gonna go ahead and do something with this. Now you'll notice we have the key for the order, but what we don't have is the actual phone number, the actual contents itself. And this is actually all that's passed when you do a manual, manual triggered flow.\u003C/p>\u003Cp>So let's go ahead and grab the whole record to make sure we have the phone number. So let's call this get order get order, and we are going to set that to read data. So let's go ahead and set this to full access, get the orders collection. And what we'll do here is we will get a dynamic value from the trigger. So trigger trigger dot body dot keys, and we just want the first item there.\u003C/p>\u003Cp>So let's hit save. Save again. And with every step, we're gonna make sure this this works. So let's go back to our order. Let's run the flow again, and we'll head back to the flow.\u003C/p>\u003Cp>And we see now there's a second log. So in get order, we now have the full record, including the phone number. Take note again that this operation has a key of get underscore order. Next, we're gonna go and actually send a text message. So let's call this one send message.\u003C/p>\u003Cp>Send message. And we're going to do this by requesting a URL. Now I've taken a look at the Vonage API documentation ahead of time, so I can shortcut us a little bit here, which is that we, in order to send an SMS message, need to send a post request to this URL, res.nextmo.com/sms /json. Now there are a set of query parameters we require in order to make this successful. Firstly, we need to specify who's going to receive the text message.\u003C/p>\u003Cp>So we have that and get underscore order because that's this operation here dot phone. Next, we need to say who are we sending this from. And so I've actually just copied and pasted my Vonage phone number that I showed you at the beginning. So to and from. Great.\u003C/p>\u003Cp>Next is the actual message contents itself. So trigger dot body dot text. So that's what we inserted in the box. And then finally, we need to provide our API key and API secret here, which I'm just copying and pasting. API key and API secret.\u003C/p>\u003Cp>So let's hit save now, save again, and let's go and retrigger this. Oh, hello to your phone. Run flow on current item. And, hopefully, now if I go over there you go. So this is just, this is just the messages app on on macOS piping the text message right here.\u003C/p>\u003Cp>So that is a little example of how you can connect, Vonage and Directus in order to send SMS messages on demand. Now there are loads of ways you can achieve this. Here, we've used directors flows, and we've kind of hard coded some values inside of this request here. And you can also build custom operations in order to kind of abstract this away and make it a little bit easier so you don't have to provide all of these query parameters every time. And, of course, this doesn't have to be indirect as automate and flows.\u003C/p>\u003Cp>It can be over in our insights dashboards, or it can be in a hook and so many other ways. But this is a little starting point to see how you can send SMS, messages, text messages using Vonage and Directus together. Hope you've enjoyed this episode of Quick Connect, and I'll see you in the next episode.\u003C/p>","Vonage is a cloud communication platform that lets you integrate messaging, voice, and video into your applications. And today on Quick Connect, we're going to set up a system that will allow us to send messages directly to users from an item page. So let's get started. On the Vonage side, you're going to need a couple of things. Firstly, you're going to need a Vonage API account. And with that account, you will get an API key and an API secret, which we're going to need in a moment. You're also going to need a phone number from the Vonage programmable numbers product line. So you can go ahead, grab a new number straight here in the dashboard, and make sure that that number has the SMS feature enabled. Now that's everything we need over here, so take note of your API key, secret, and the phone number, and then head over to your director's project. Before we build out this connection to Vonage, I wanna just take a note that I've created this orders collection. And in that orders collection, there is a phone field. That will be the user that that will be the phone number that receives the our text message. Let's go and create a new flow here, and we will call this send SMS. And we'll go ahead and set this based on a manual trigger. We'll do it on just the orders collection, and it will require a confirmation dialogue. So this is like a pop up box that can collect more information before triggering the flow so we'll call this one message text text we'll make it a text box and hit save. So we'll get to see what that confirmation dialogue does. Let's go back to our order, and we see here in the sidebar, there's now a send SMS button. This is the confirmation dialogue. So let's write a message, hello, world, bedtime lucky world, and we'll run flow on current item. Let's go back to the flow and make sure that's worked. We see over here, there's a one on the log, So let's drop that down, and we get to see here in the payload is, inside of the body of the payload, the text that we entered in the box and the ID of the order. So now we're gonna go ahead and do something with this. Now you'll notice we have the key for the order, but what we don't have is the actual phone number, the actual contents itself. And this is actually all that's passed when you do a manual, manual triggered flow. So let's go ahead and grab the whole record to make sure we have the phone number. So let's call this get order get order, and we are going to set that to read data. So let's go ahead and set this to full access, get the orders collection. And what we'll do here is we will get a dynamic value from the trigger. So trigger trigger dot body dot keys, and we just want the first item there. So let's hit save. Save again. And with every step, we're gonna make sure this this works. So let's go back to our order. Let's run the flow again, and we'll head back to the flow. And we see now there's a second log. So in get order, we now have the full record, including the phone number. Take note again that this operation has a key of get underscore order. Next, we're gonna go and actually send a text message. So let's call this one send message. Send message. And we're going to do this by requesting a URL. Now I've taken a look at the Vonage API documentation ahead of time, so I can shortcut us a little bit here, which is that we, in order to send an SMS message, need to send a post request to this URL, res.nextmo.com/sms /json. Now there are a set of query parameters we require in order to make this successful. Firstly, we need to specify who's going to receive the text message. So we have that and get underscore order because that's this operation here dot phone. Next, we need to say who are we sending this from. And so I've actually just copied and pasted my Vonage phone number that I showed you at the beginning. So to and from. Great. Next is the actual message contents itself. So trigger dot body dot text. So that's what we inserted in the box. And then finally, we need to provide our API key and API secret here, which I'm just copying and pasting. API key and API secret. So let's hit save now, save again, and let's go and retrigger this. Oh, hello to your phone. Run flow on current item. And, hopefully, now if I go over there you go. So this is just, this is just the messages app on on macOS piping the text message right here. So that is a little example of how you can connect, Vonage and Directus in order to send SMS messages on demand. Now there are loads of ways you can achieve this. Here, we've used directors flows, and we've kind of hard coded some values inside of this request here. And you can also build custom operations in order to kind of abstract this away and make it a little bit easier so you don't have to provide all of these query parameters every time. And, of course, this doesn't have to be indirect as automate and flows. It can be over in our insights dashboards, or it can be in a hook and so many other ways. But this is a little starting point to see how you can send SMS, messages, text messages using Vonage and Directus together. Hope you've enjoyed this episode of Quick Connect, and I'll see you in the next episode.",[171],"b477a853-5181-4816-9ef9-be50b2258204",[],{"id":140,"number":141,"show":122,"year":142,"episodes":174},[144,145,146,147,148,149,150,151],{"id":145,"slug":176,"vimeo_id":177,"description":178,"tile":179,"length":180,"resources":181,"people":185,"episode_number":135,"published":187,"title":188,"video_transcript_html":189,"video_transcript_text":190,"content":8,"seo":8,"status":130,"episode_people":191,"recommendations":193,"season":194},"deepgram","895925213","Automatically transcribe new audio files with Deepgram's Speech-to-Text API.","f894ea9e-da61-4529-b51a-b399acd1bdc8",11,[182],{"name":183,"url":184},"Deepgram Docs","https://developers.deepgram.com/docs",[186],{"name":164,"url":165},"2024-01-05","Transcribe Audio Files with Deepgram","\u003Cp>Speaker 0: Deepgram offers a speech to text API that allows us to send off audio files and receive transcriptions in return. Today in QuickConnect, we're going to connect to Deepgram. So whenever we upload a file to our director's project, if it's an audio file, it will automatically go off, generate a transcription, and then save that transcription to the file description. So let's get started. The first thing we'll need is a Deepgram API key.\u003C/p>\u003Cp>So head to the Deepgram console and create a new API key. You can give it any name you want and set the permissions. We only require the lowest level member permissions. We'll set the API key to never expire and hit create key. I'm gonna take note of this API key for later because I only get to see it right now one time.\u003C/p>\u003Cp>If I lose this key, I'll need to come and generate a new one here in the Deepgram console. So hit got it and head over to your directors project. Let's create a new flow in our directors project. I'll call this one transcribe new audio files. We're going to set this up to trigger whenever an event happens in our directors project.\u003C/p>\u003Cp>We're going to pick non blocking, which means that all of directors' built in functionality will not be stopped or halted or paused, but this logic will run-in parallel. Now in terms of scope, we're going to pick files dot upload. So this will trigger every time a file is uploaded, and we will hit save. So that is our trigger. Now the first thing we're going to do is actually make sure that we only continue this logic if the file is an audio file.\u003C/p>\u003Cp>Because right now, this flow will start on every single file upload regardless of file type and location. So let's create a new operation. I'll just call it check and add some conditional logic. This condition ensures that the file type contains the word audio. The file type could be something like audio /mp3audio.wav and so on.\u003C/p>\u003Cp>But they all start with audio. So we're gonna check that the file type contains the word audio. Now there are 2 paths here, the resolve path and the reject path. Now if this condition is not true, I e, the file is not an audio file, we will go down the reject path, and we're just gonna add nothing there, which means the whole flow just just stops. It completes at this point.\u003C/p>\u003Cp>Let's create a new operation on the resolve track. And in here, we're going to call it Deepgram. We're gonna go ahead and make this a webhook slash request URL, operation, and this provides some additional options. Now in the Deepgram documentation, they give you a URL to use when you want to go ahead and create transcriptions. It needs to be a post request to api.deepgram.com/vone/listen.\u003C/p>\u003Cp>And then you can add any number of query parameters to change what Deepgram is going to do and return with that audio file. So here we're saying, basically, make it human readable, add some add some formatting, and diarization will split out who's speaking. So it will tell us whether it's speaker 1 or speaker 2 and so on. Now Deepgram also requires a header in order to authenticate. So we're gonna go ahead and add a new header.\u003C/p>\u003Cp>We're going to call this one authorization, capital a, and the value will be the word token, space, and then your Deepgram API key, and hit save. Now the final thing we need to do is actually give it the file URL. So what we're actually gonna do is pause here for just a moment, and we're going to try and trigger this flow as it stands. This will fail because we're not providing the file, but we're gonna see what this does. So I'm gonna go ahead and add a new file to our file library.\u003C/p>\u003Cp>Here we go. So we'll upload that file. Fantastic we will head back to our flow and we'll see here that there's a little one icon here one run and we'll get to see. So we get a trigger. So this is the actual file being uploaded.\u003C/p>\u003Cp>Now there's an interesting thing here. We see inside of this trigger value, there is a key. This is the unique identifier for this file in the director system. Then we see here that this was the check, and then we did a Deepgram API call with our header. And the returned payload, as we expected, is incorrect because we didn't actually send the file.\u003C/p>\u003Cp>The reason I wanted to do this is because I wanted to show you that we need this ID trigger dot key. So now let's go in and edit this, this request over here. I'm I'm gonna just paste it in, and we're gonna explain it here. So every asset in Directus is accessible with the full URL of the Directus project /assets/thekey, the unique ID for that file. So this is now the URL for that file.\u003C/p>\u003Cp>If I go to this complete URL, of course, putting the real key in here, I would actually get that file. However, there is one extra thing to note here, which is that right now as it stands, this directors project does not make all of the files public. Right now, if we continue with this request and we run that again, we reupload a file, there'll be an authentication error. Directus won't be able to reach that file. Then we have a few options.\u003C/p>\u003Cp>The first is to go into the public role, go into directors files, and make that file or the directory which the file lives in public. That's one viable option. But there is another way to do this, which perhaps is a little more recommended. Let me go into my user account here and generate an API token. So I will save that and hit save.\u003C/p>\u003Cp>Going back to my flow here, I'm gonna once again edit this URL, and I'm going to append to the end of it access_token, and then my API key. This will authenticate the API request. So this now is a file that I can access. Let's trigger this flow again. Let's go over to our file library.\u003C/p>\u003Cp>I'm actually just gonna go ahead and re upload the file again. So it's a fresh file. So we'll have a duplicate here, but that will trigger the flow to rerun. Let's look at this second run of the flow. The returned payload comes back and there is our transcript.\u003C/p>\u003Cp>That's fantastic. So we get that back. So our Deepgram request was successful. There's actually quite a large payload that comes back here. Data, results, channels, which is an array of objects, alternatives, which is an array, and there is our transcript.\u003C/p>\u003Cp>Because we turned on smart formatting, there is also a ton of extra metadata that comes back. Like, it's huge. Every single word, every single word there. And we also get our paragraphs coming back with a transcript and so on and so forth. So you can go and explore this returned object.\u003C/p>\u003Cp>I, surprise, surprise, have already done that work for us. The final step is going to be to actually save this value back to the file description. So let's create a new operation here, and let's make this an update data, operation. In the collection field, we are going to update a director's files item. And we're going to put in here the ID of that initial item trigger dot key.\u003C/p>\u003Cp>So now we're gonna be updating the item that actually triggered this whole flow in the first place and in turn was transcribed. Now the final thing we're gonna do here is we are going to update the description. So let's go ahead and do that here together. The description, which is a built in field, is going to be equal to Deepgram and the reason we can say Deepgram is because that is the key of this step it's called Deepgram Deepgram dot data dot results dot channels. That was an array Dot alternatives.\u003C/p>\u003Cp>That was an array.paragraphs dot transcript. Oh, rolls right off the tongue that. And, of course, that might be a little bit of development trial and error, but I know that that is the location of the formatted transcript. Let's hit save. Let's hit save again.\u003C/p>\u003Cp>And now let's go and upload once again a new file. So that's uploading. Now in the background, that flow has already been triggered. Deepgram has or the condition has already been checked. Deepgram has already been called.\u003C/p>\u003Cp>And in theory, if we go in here now, we should see in the description did I pick the wrong file? Yes. I did. It was this one. The description with speaker diarization and formatting.\u003C/p>\u003Cp>So, automatically, now that this flow is set up and enabled, I can go hands off and know that every audio file that gets uploaded will be transcribed. Now at the time of recording, Deepgram do also accept video files as well, but you can expect that they'll take a little bit longer to return. And, typically, you should be trying to send us as little, you know, data as possible. So audio files was our condition, but you can widen that condition as well. So let's just sum up the way that that flow looks.\u003C/p>\u003Cp>Transcribe new audio files. This is triggered whenever a new file is uploaded. And just to be clear, that's every file. So what we do is we check that it's an audio file, then we go off to the Deepgram API authenticating with our API key. And then once we get the transcript back, we update this file with the transcript.\u003C/p>\u003Cp>Now there are some extra things just to consider here. Right now, this will happen on every audio file. You can add further conditions to perhaps specify down to a folder or only allow specific users to do this, or you can make it a manual step. You can set up a flow with a button on an item page and go ahead then and only generate a transcript on demand. I hope you found this interesting, and I'll see you in the next episode.\u003C/p>","Deepgram offers a speech to text API that allows us to send off audio files and receive transcriptions in return. Today in QuickConnect, we're going to connect to Deepgram. So whenever we upload a file to our director's project, if it's an audio file, it will automatically go off, generate a transcription, and then save that transcription to the file description. So let's get started. The first thing we'll need is a Deepgram API key. So head to the Deepgram console and create a new API key. You can give it any name you want and set the permissions. We only require the lowest level member permissions. We'll set the API key to never expire and hit create key. I'm gonna take note of this API key for later because I only get to see it right now one time. If I lose this key, I'll need to come and generate a new one here in the Deepgram console. So hit got it and head over to your directors project. Let's create a new flow in our directors project. I'll call this one transcribe new audio files. We're going to set this up to trigger whenever an event happens in our directors project. We're going to pick non blocking, which means that all of directors' built in functionality will not be stopped or halted or paused, but this logic will run-in parallel. Now in terms of scope, we're going to pick files dot upload. So this will trigger every time a file is uploaded, and we will hit save. So that is our trigger. Now the first thing we're going to do is actually make sure that we only continue this logic if the file is an audio file. Because right now, this flow will start on every single file upload regardless of file type and location. So let's create a new operation. I'll just call it check and add some conditional logic. This condition ensures that the file type contains the word audio. The file type could be something like audio /mp3audio.wav and so on. But they all start with audio. So we're gonna check that the file type contains the word audio. Now there are 2 paths here, the resolve path and the reject path. Now if this condition is not true, I e, the file is not an audio file, we will go down the reject path, and we're just gonna add nothing there, which means the whole flow just just stops. It completes at this point. Let's create a new operation on the resolve track. And in here, we're going to call it Deepgram. We're gonna go ahead and make this a webhook slash request URL, operation, and this provides some additional options. Now in the Deepgram documentation, they give you a URL to use when you want to go ahead and create transcriptions. It needs to be a post request to api.deepgram.com/vone/listen. And then you can add any number of query parameters to change what Deepgram is going to do and return with that audio file. So here we're saying, basically, make it human readable, add some add some formatting, and diarization will split out who's speaking. So it will tell us whether it's speaker 1 or speaker 2 and so on. Now Deepgram also requires a header in order to authenticate. So we're gonna go ahead and add a new header. We're going to call this one authorization, capital a, and the value will be the word token, space, and then your Deepgram API key, and hit save. Now the final thing we need to do is actually give it the file URL. So what we're actually gonna do is pause here for just a moment, and we're going to try and trigger this flow as it stands. This will fail because we're not providing the file, but we're gonna see what this does. So I'm gonna go ahead and add a new file to our file library. Here we go. So we'll upload that file. Fantastic we will head back to our flow and we'll see here that there's a little one icon here one run and we'll get to see. So we get a trigger. So this is the actual file being uploaded. Now there's an interesting thing here. We see inside of this trigger value, there is a key. This is the unique identifier for this file in the director system. Then we see here that this was the check, and then we did a Deepgram API call with our header. And the returned payload, as we expected, is incorrect because we didn't actually send the file. The reason I wanted to do this is because I wanted to show you that we need this ID trigger dot key. So now let's go in and edit this, this request over here. I'm I'm gonna just paste it in, and we're gonna explain it here. So every asset in Directus is accessible with the full URL of the Directus project /assets/thekey, the unique ID for that file. So this is now the URL for that file. If I go to this complete URL, of course, putting the real key in here, I would actually get that file. However, there is one extra thing to note here, which is that right now as it stands, this directors project does not make all of the files public. Right now, if we continue with this request and we run that again, we reupload a file, there'll be an authentication error. Directus won't be able to reach that file. Then we have a few options. The first is to go into the public role, go into directors files, and make that file or the directory which the file lives in public. That's one viable option. But there is another way to do this, which perhaps is a little more recommended. Let me go into my user account here and generate an API token. So I will save that and hit save. Going back to my flow here, I'm gonna once again edit this URL, and I'm going to append to the end of it access_token, and then my API key. This will authenticate the API request. So this now is a file that I can access. Let's trigger this flow again. Let's go over to our file library. I'm actually just gonna go ahead and re upload the file again. So it's a fresh file. So we'll have a duplicate here, but that will trigger the flow to rerun. Let's look at this second run of the flow. The returned payload comes back and there is our transcript. That's fantastic. So we get that back. So our Deepgram request was successful. There's actually quite a large payload that comes back here. Data, results, channels, which is an array of objects, alternatives, which is an array, and there is our transcript. Because we turned on smart formatting, there is also a ton of extra metadata that comes back. Like, it's huge. Every single word, every single word there. And we also get our paragraphs coming back with a transcript and so on and so forth. So you can go and explore this returned object. I, surprise, surprise, have already done that work for us. The final step is going to be to actually save this value back to the file description. So let's create a new operation here, and let's make this an update data, operation. In the collection field, we are going to update a director's files item. And we're going to put in here the ID of that initial item trigger dot key. So now we're gonna be updating the item that actually triggered this whole flow in the first place and in turn was transcribed. Now the final thing we're gonna do here is we are going to update the description. So let's go ahead and do that here together. The description, which is a built in field, is going to be equal to Deepgram and the reason we can say Deepgram is because that is the key of this step it's called Deepgram Deepgram dot data dot results dot channels. That was an array Dot alternatives. That was an array.paragraphs dot transcript. Oh, rolls right off the tongue that. And, of course, that might be a little bit of development trial and error, but I know that that is the location of the formatted transcript. Let's hit save. Let's hit save again. And now let's go and upload once again a new file. So that's uploading. Now in the background, that flow has already been triggered. Deepgram has or the condition has already been checked. Deepgram has already been called. And in theory, if we go in here now, we should see in the description did I pick the wrong file? Yes. I did. It was this one. The description with speaker diarization and formatting. So, automatically, now that this flow is set up and enabled, I can go hands off and know that every audio file that gets uploaded will be transcribed. Now at the time of recording, Deepgram do also accept video files as well, but you can expect that they'll take a little bit longer to return. And, typically, you should be trying to send us as little, you know, data as possible. So audio files was our condition, but you can widen that condition as well. So let's just sum up the way that that flow looks. Transcribe new audio files. This is triggered whenever a new file is uploaded. And just to be clear, that's every file. So what we do is we check that it's an audio file, then we go off to the Deepgram API authenticating with our API key. And then once we get the transcript back, we update this file with the transcript. Now there are some extra things just to consider here. Right now, this will happen on every audio file. You can add further conditions to perhaps specify down to a folder or only allow specific users to do this, or you can make it a manual step. You can set up a flow with a button on an item page and go ahead then and only generate a transcript on demand. I hope you found this interesting, and I'll see you in the next episode.",[192],"9e674d17-1aac-48b1-9a69-0807e41d21c0",[],{"id":140,"number":141,"show":122,"year":142,"episodes":195},[144,145,146,147,148,149,150,151],{"id":146,"slug":197,"vimeo_id":198,"description":199,"tile":200,"length":201,"resources":202,"people":206,"episode_number":208,"published":209,"title":210,"video_transcript_html":211,"video_transcript_text":212,"content":8,"seo":8,"status":130,"episode_people":213,"recommendations":215,"season":216},"clearbit","895919599","Automatically enrich new user profiles with rich data from Clearbit.","941881a9-2f6c-4e88-90c4-d27c6be2346e",8,[203],{"name":204,"url":205},"Clearbit Enrichment API","https://clearbit.com/platform/enrichment",[207],{"name":164,"url":165},3,"2024-01-12","Enriching User Profiles with Clearbit","\u003Cp>Speaker 0: Clearbit provides an enrichment API that lets you send off just an email address for a person, and it will return a much more full contact containing all the information that Clearbit provides. In this episode of Quick Connect, we're going to automatically enrich data from new users in our director's project. So with that, let's get started. The first thing you'll need is a Clearbit API key, which you can get inside of the Clearbit dashboard. It's called the secret API key, so just go ahead and copy that.\u003C/p>\u003Cp>And be certain that no one else ever gets hold of this key. Otherwise, they can also perform requests against your Clearbit account. I save this for later, and let's head back over to our Director's project to set up our connection. Here in our directors project, let's create a new flow. I'm gonna call this one clearbit enrichment.\u003C/p>\u003Cp>For the trigger, we're going to use an event hook and make this action non blocking. That means that an item, a user will be entered into the database and the clear bit enrichment will happen in parallel, which means we don't slow down or otherwise prevent the user creation from taking place. The scope is going to be items dot create on the directors users collection. Hit save to set up our trigger. Before we continue, we are going to see what the shape of the data is when a new user is created.\u003C/p>\u003Cp>So let's save this flow, head over to our user module, and create a new user. I'll just call this one joeblogs, very original, joe@example.com, and hit save. Let's go back over to our flow and we'll see here that there is a one inside of the logs. So this is one time that this flow has been triggered. We see here in the payload that we get a first name, a last name, and an email, and indeed any other information provided on creation of this user.\u003C/p>\u003Cp>So take a note here that we have payload email. This is where the email lies, which we're going to need in a moment to use Clearbit. Also, take a note that this user, this item in the director's users collection has a unique key. We'll also need this later when we update the user record with the Clearbit data. The next thing we're gonna do is create an operation I'm gonna call this one clear bit and take note that the key is also Clearbit or lowercase.\u003C/p>\u003Cp>This key will be important later. We're going to request a URL, and we're going to send a get request to the Clearbit API. So as per the Clearbit documentation, we're using the email people enrichment endpoint. So person.clearbit.comvone/people/email, and then a dynamic value based on the value of the trigger. So just before it was joe@example.com.\u003C/p>\u003Cp>Now to authenticate with Clearbit, we're going to need to add a header. So we're gonna go ahead and do that. The header name is authorization, and the value is bearer space, and then your Clearbit API key. Once again, make sure other people don't see this key as they don't provide a way to invalidate and regenerate an API key manually. So let's go ahead and give this a test.\u003C/p>\u003Cp>Let's hit save. Let's go back to our user module. Let's create a new user. I'm going to use our CEO Ben's email address here and hit save. Let's go back to our flows.\u003C/p>\u003Cp>Let's look at our latest log here, and let's look at the returned payload from Clearbit. We see that it was a successful request. There's an there's a property called data, which is an object. And inside of this, a large object containing all the information that Clearbit holds on, Ben. So we have things like well, it's the Gravatar URL, so that's like the the avatar, LinkedIn usernames.\u003C/p>\u003Cp>We don't have a Twitter handle, but that would be here. GitHub username, Facebook. We also have things like where are they based, what's their time zone offset, and stuff like that. The website direct to. Now what we're gonna do just to demonstrate how you would then go and update a user is we're just gonna take the location here, and we're going to update Ben's brand new user profile with the location.\u003C/p>\u003Cp>If we take a look back at the user directory, a user object here, we see there's a bunch of provided fields, location, title, description, tags, and so on. You can add custom fields inside of the data model settings in your director's project. But seeing as location already exists, we'll just use that. So let's add one more operation here, and we're gonna make this an update data operation. I'm gonna go in here.\u003C/p>\u003Cp>And first thing we're gonna do is say we're gonna update an item in the directors users collection. The ID of the user we're updating will be trigger dot key. Remember I mentioned that earlier on? And the payload, the data we will actually be updating is the location. And we said that this would be clear bit dot data dot location.\u003C/p>\u003Cp>It's clear bit because that was the key on this step here that return the data. So let's hit save, save once more, and run this flow again. Just to keep this easy, I'm gonna delete this user and then recreate it so we don't get confused. Perfect. We'll hit save.\u003C/p>\u003Cp>In the background the moment I hit save the user was saved but also the Clearbit operation was run Firstly, we see that it did indeed run here. If I look at that user item, we see the location has been updated. So you can, of course, update as many fields as you want, including your custom fields. Now there are a few notes I wanna make once as we get to the end of this little guide here. Firstly, Clearbit provides no guarantee around what data is going to be returned on an individual.\u003C/p>\u003Cp>So what you may wanna do is add an additional step here between the request and the update data to check what exists and only add that into the payload. The reason that's important is if there is a value which is null or empty and it already exists in your director's project, it will be overwritten by the empty value. So you wanna add an extra step in there. Next, you have to be cognizant of the fact that every time you make a request to clear it, it's using up API quota or maybe costing money if you have paid agreements with them. So you may not wanna do this automatically on every user.\u003C/p>\u003Cp>You may want to do this manually. Perhaps you add a button on a user profile that will trigger this, or you do it based on some other event, like they get in touch with the salesperson or there's a record created in another collection or so on. The final thing to note is today we use the enrichment API that Clearbit provides. They have an additional API called the combined enrichment API that will also return a bunch of information about a company. So that could be useful.\u003C/p>\u003Cp>You know, you create a collection full of companies, and then you link users directly to their companies automatically using the ClearBit data. So I hope you enjoyed this episode of quick connect I'll see you next time\u003C/p>","Clearbit provides an enrichment API that lets you send off just an email address for a person, and it will return a much more full contact containing all the information that Clearbit provides. In this episode of Quick Connect, we're going to automatically enrich data from new users in our director's project. So with that, let's get started. The first thing you'll need is a Clearbit API key, which you can get inside of the Clearbit dashboard. It's called the secret API key, so just go ahead and copy that. And be certain that no one else ever gets hold of this key. Otherwise, they can also perform requests against your Clearbit account. I save this for later, and let's head back over to our Director's project to set up our connection. Here in our directors project, let's create a new flow. I'm gonna call this one clearbit enrichment. For the trigger, we're going to use an event hook and make this action non blocking. That means that an item, a user will be entered into the database and the clear bit enrichment will happen in parallel, which means we don't slow down or otherwise prevent the user creation from taking place. The scope is going to be items dot create on the directors users collection. Hit save to set up our trigger. Before we continue, we are going to see what the shape of the data is when a new user is created. So let's save this flow, head over to our user module, and create a new user. I'll just call this one joeblogs, very original, joe@example.com, and hit save. Let's go back over to our flow and we'll see here that there is a one inside of the logs. So this is one time that this flow has been triggered. We see here in the payload that we get a first name, a last name, and an email, and indeed any other information provided on creation of this user. So take a note here that we have payload email. This is where the email lies, which we're going to need in a moment to use Clearbit. Also, take a note that this user, this item in the director's users collection has a unique key. We'll also need this later when we update the user record with the Clearbit data. The next thing we're gonna do is create an operation I'm gonna call this one clear bit and take note that the key is also Clearbit or lowercase. This key will be important later. We're going to request a URL, and we're going to send a get request to the Clearbit API. So as per the Clearbit documentation, we're using the email people enrichment endpoint. So person.clearbit.comvone/people/email, and then a dynamic value based on the value of the trigger. So just before it was joe@example.com. Now to authenticate with Clearbit, we're going to need to add a header. So we're gonna go ahead and do that. The header name is authorization, and the value is bearer space, and then your Clearbit API key. Once again, make sure other people don't see this key as they don't provide a way to invalidate and regenerate an API key manually. So let's go ahead and give this a test. Let's hit save. Let's go back to our user module. Let's create a new user. I'm going to use our CEO Ben's email address here and hit save. Let's go back to our flows. Let's look at our latest log here, and let's look at the returned payload from Clearbit. We see that it was a successful request. There's an there's a property called data, which is an object. And inside of this, a large object containing all the information that Clearbit holds on, Ben. So we have things like well, it's the Gravatar URL, so that's like the the avatar, LinkedIn usernames. We don't have a Twitter handle, but that would be here. GitHub username, Facebook. We also have things like where are they based, what's their time zone offset, and stuff like that. The website direct to. Now what we're gonna do just to demonstrate how you would then go and update a user is we're just gonna take the location here, and we're going to update Ben's brand new user profile with the location. If we take a look back at the user directory, a user object here, we see there's a bunch of provided fields, location, title, description, tags, and so on. You can add custom fields inside of the data model settings in your director's project. But seeing as location already exists, we'll just use that. So let's add one more operation here, and we're gonna make this an update data operation. I'm gonna go in here. And first thing we're gonna do is say we're gonna update an item in the directors users collection. The ID of the user we're updating will be trigger dot key. Remember I mentioned that earlier on? And the payload, the data we will actually be updating is the location. And we said that this would be clear bit dot data dot location. It's clear bit because that was the key on this step here that return the data. So let's hit save, save once more, and run this flow again. Just to keep this easy, I'm gonna delete this user and then recreate it so we don't get confused. Perfect. We'll hit save. In the background the moment I hit save the user was saved but also the Clearbit operation was run Firstly, we see that it did indeed run here. If I look at that user item, we see the location has been updated. So you can, of course, update as many fields as you want, including your custom fields. Now there are a few notes I wanna make once as we get to the end of this little guide here. Firstly, Clearbit provides no guarantee around what data is going to be returned on an individual. So what you may wanna do is add an additional step here between the request and the update data to check what exists and only add that into the payload. The reason that's important is if there is a value which is null or empty and it already exists in your director's project, it will be overwritten by the empty value. So you wanna add an extra step in there. Next, you have to be cognizant of the fact that every time you make a request to clear it, it's using up API quota or maybe costing money if you have paid agreements with them. So you may not wanna do this automatically on every user. You may want to do this manually. Perhaps you add a button on a user profile that will trigger this, or you do it based on some other event, like they get in touch with the salesperson or there's a record created in another collection or so on. The final thing to note is today we use the enrichment API that Clearbit provides. They have an additional API called the combined enrichment API that will also return a bunch of information about a company. So that could be useful. You know, you create a collection full of companies, and then you link users directly to their companies automatically using the ClearBit data. So I hope you enjoyed this episode of quick connect I'll see you next time",[214],"09b8019b-836c-466a-b39f-ccab18a909ce",[],{"id":140,"number":141,"show":122,"year":142,"episodes":217},[144,145,146,147,148,149,150,151],{"id":147,"slug":219,"vimeo_id":220,"description":221,"tile":222,"length":180,"resources":8,"people":223,"episode_number":225,"published":226,"title":227,"video_transcript_html":228,"video_transcript_text":229,"content":8,"seo":8,"status":130,"episode_people":230,"recommendations":232,"season":233},"openai","899796860","Use OpenAI's APIs to generate images with DALL·E and social posts with GPT-4.","b3d3b5a4-2946-4c72-bf88-e8bc933bd7c0",[224],{"name":164,"url":165},4,"2024-01-19","Generate Images and Social Posts with OpenAI","\u003Cp>Speaker 0: Chances are you've heard of OpenAI. They're the company behind ChatGPT. Now today in QuickConnect, we're going to integrate with OpenAI's APIs to use both their GPT 4 model and their DALL E image generation model to enrich data in our director's project. Here, I've got a set of articles, a set of recipes, and we'll be using OpenAI to write a social post for us to promote this article and to generate an image which we can then use, on the web page or in social posts. On the OpenAI side, all we're gonna need is an API key.\u003C/p>\u003Cp>Send over to your dashboard, create a secret key, take note of it somewhere. We'll use it later. And then we're ready to go over into directors and set things up there. So over in our directors project, let's create a new flow. Let's call this one generate social post, and we will use a manual trigger on the article's collection on the item page.\u003C/p>\u003Cp>So this is our trigger. I wanna show you something interesting about this. So if we go over and refresh our item, we now see we can generate a social post here. Going back over to the flow and refreshing it and taking a look at the logs, we see here that we only get the ID of the article, and we actually wanna use the title of the article. So the first thing we need to do is create an operation called article, and we're gonna read that whole the whole article object.\u003C/p>\u003Cp>So we'll be going into the articles collection with full access, and we will want the trigger dot Bobby dot keys 0. So that is inside of that object where the ID for this item is. So now article will return the entire object. Now it's time to go and actually go to open AI and get it to generate a post for us. So we'll create a new operation.\u003C/p>\u003Cp>I'll call it generate. And we'll make a web request. This will be a post request to their chat completions endpoint. Now we need to authenticate ourselves with our API key. We'll do that with a header.\u003C/p>\u003Cp>Authorization bearer space, and then our API key, and then our request body. Now it's a little bit of a big object, so I'm gonna copy and paste a starting point to discuss here. We'll be using the GPT 4 model, and there are 2 messages we're gonna send in an array. The first is a primer for the system to tell it what we expect of it. So we're telling it here.\u003C/p>\u003Cp>They're an editor of a food blog. They take in recipes. They spit out social posts and the audience is busy professionals with little time. And, of course, you can tweak this for your specific use case. And then we get to write our actual prompt itself.\u003C/p>\u003Cp>So I'll say write a Twitter post for our article dot title recipe. So this is a dynamic value that is returned out of this object here let's hit save let's hit save again and let's rerun this flow notice it takes a little bit longer to run this time that's because it's actually going off to open ai and back So if we refresh here, we'll see a new log appears. And we'll see the returned value contains a data object with an array called choices. And inside of this object is a message and content. And that is a viable social post, or at least a starting point something to help us out.\u003C/p>\u003Cp>So the last thing we want to do here is actually save that social post back to the item. So we will add one final operation here. We will update data. We will update an article using that same key from earlier. So, trigger dot Bobby dot keys 0, I believe is what it was.\u003C/p>\u003Cp>And the payload we want to send is going to be social. That's the name of the field. And the value, it's quite nested. I did take note of where it was earlier, though. Generate because that's the name of the operation that returns this value.\u003C/p>\u003Cp>Generate dot data dot choices, that was an array, dot message dot content so let's save that let's refresh here and let's just try this one more time So lemon herb roasted chicken. We will hit generate social post. We'll wait just a moment, and we should see over there in the social box, our social post has been created. So that's the first half of what we're doing today. This is a double whammy.\u003C/p>\u003Cp>So now we're gonna create a new flow to generate an image. So now let's create a second flow for the image generation. I'll call this 1 generate image. It will be a manual flow trigger again on articles on the item page, except this time, we're going to ask the user for an image prompt. So we will require a confirmation which will pop up a model you'll see in a moment.\u003C/p>\u003Cp>So I'll call this confirm, and we can provide any number of keys. We just want one prompt. Of course, you might choose to make this a little more descriptive to users, but we're just gonna add an input with the value well, with the name of prompt. So what we're gonna do actually, once again, let's see how this works. So let's go back to our item.\u003C/p>\u003Cp>Let's refresh. And inside of flows, there's now 2. Here's generate image, and here's our prompt. So I will call this lemon herb roasted chicken in a baking dish, and I'm sure you could create something a little more creative. Let's run the flow.\u003C/p>\u003Cp>Let's refresh our flow and take a look at where that prompt has gone. And just like before, inside of our body, we have an array of keys. So that's the ID of the article, but we also have this prompt here. So we're gonna use that and immediately go off to DALL E, the image generation endpoint, and we will, and we'll generate an image using that prompt. So we'll call this one generate.\u003C/p>\u003Cp>We'll make a web request again. It will be a post request to this URL. Once again, we need to validate ourselves, so we will add the same header as before, authorization bearer space token, and then we have the request body. This is what the request body looks like. We pass in the prompt.\u003C/p>\u003Cp>We specify the size from 1 of the provided allowed sizes. With DALL E 3 at the moment, you can only generate 1 variation at a time. But if you use other DALL E models, you might be able to generate multiple. We'll just use 1, and that's the name of the model. So let's hit save once again, and we will rerun this flow.\u003C/p>\u003Cp>We see that's taking a little longer because now it's actually going off and generating an image for us we'll wait for that to be complete here We'll go back to the flow. We'll refresh to look at the new log, and we'll see here that in the data that is returned is an array called data with a revised prompt and a URL. Now we want to import this image via URL into our directors project. Now flows don't provide a way to do that out of the box. So I'm actually using this lovely community, community extension, which does allow us to import images from external URLs.\u003C/p>\u003Cp>So I will link this along with the video, but I've already gone ahead and made that extension available in this director's project. So let's add a new operation here. We will use this file import. I think I'll call this one import, and we provide the URL. So that came from the step that we could generate dot data dot data and dot URL like so And what file import will do is return just a string, and the string is the ID of the newly imported file in Directus.\u003C/p>\u003Cp>So instead of demoing it at this point, we're just gonna go ahead straight away and include that image inside of our post. So update data once again, very similar to before, articles, full access. The ID again will be trigger dot Bobby dot keys 0, and the payload will be image, and we just want that value there. So import because it doesn't return an object. It just returns a string.\u003C/p>\u003Cp>So let's hit save, and that should be everything we need. So we have lemon herb roasted chicken here. So let's go ahead and generate the image. Let me get my prompt back. We'll run flow on the current item.\u003C/p>\u003Cp>We'll give that a few moments to complete. It should go off, generate the image, import it to direct us, and then update this item with that newly imported file. So let's see if that works. There we go. Fantastic.\u003C/p>\u003Cp>Hurrah. So in this episode of QuickConnect a little bit longer, we've done 2 things with the open API the open AI APIs. We have created a flow which on demand can create a social post. And the nice thing is if you don't like it or you don't like the image, you can just press the buttons again, and it will go ahead and regenerate them. And the second one generates an image with the DALL E, model.\u003C/p>\u003Cp>It imports it into Directus using that extension and then includes it here inside of the item. I hope you found this super interesting. I had a lot of fun putting this one together, and go forth and have fun. See you next time.\u003C/p>","Chances are you've heard of OpenAI. They're the company behind ChatGPT. Now today in QuickConnect, we're going to integrate with OpenAI's APIs to use both their GPT 4 model and their DALL E image generation model to enrich data in our director's project. Here, I've got a set of articles, a set of recipes, and we'll be using OpenAI to write a social post for us to promote this article and to generate an image which we can then use, on the web page or in social posts. On the OpenAI side, all we're gonna need is an API key. Send over to your dashboard, create a secret key, take note of it somewhere. We'll use it later. And then we're ready to go over into directors and set things up there. So over in our directors project, let's create a new flow. Let's call this one generate social post, and we will use a manual trigger on the article's collection on the item page. So this is our trigger. I wanna show you something interesting about this. So if we go over and refresh our item, we now see we can generate a social post here. Going back over to the flow and refreshing it and taking a look at the logs, we see here that we only get the ID of the article, and we actually wanna use the title of the article. So the first thing we need to do is create an operation called article, and we're gonna read that whole the whole article object. So we'll be going into the articles collection with full access, and we will want the trigger dot Bobby dot keys 0. So that is inside of that object where the ID for this item is. So now article will return the entire object. Now it's time to go and actually go to open AI and get it to generate a post for us. So we'll create a new operation. I'll call it generate. And we'll make a web request. This will be a post request to their chat completions endpoint. Now we need to authenticate ourselves with our API key. We'll do that with a header. Authorization bearer space, and then our API key, and then our request body. Now it's a little bit of a big object, so I'm gonna copy and paste a starting point to discuss here. We'll be using the GPT 4 model, and there are 2 messages we're gonna send in an array. The first is a primer for the system to tell it what we expect of it. So we're telling it here. They're an editor of a food blog. They take in recipes. They spit out social posts and the audience is busy professionals with little time. And, of course, you can tweak this for your specific use case. And then we get to write our actual prompt itself. So I'll say write a Twitter post for our article dot title recipe. So this is a dynamic value that is returned out of this object here let's hit save let's hit save again and let's rerun this flow notice it takes a little bit longer to run this time that's because it's actually going off to open ai and back So if we refresh here, we'll see a new log appears. And we'll see the returned value contains a data object with an array called choices. And inside of this object is a message and content. And that is a viable social post, or at least a starting point something to help us out. So the last thing we want to do here is actually save that social post back to the item. So we will add one final operation here. We will update data. We will update an article using that same key from earlier. So, trigger dot Bobby dot keys 0, I believe is what it was. And the payload we want to send is going to be social. That's the name of the field. And the value, it's quite nested. I did take note of where it was earlier, though. Generate because that's the name of the operation that returns this value. Generate dot data dot choices, that was an array, dot message dot content so let's save that let's refresh here and let's just try this one more time So lemon herb roasted chicken. We will hit generate social post. We'll wait just a moment, and we should see over there in the social box, our social post has been created. So that's the first half of what we're doing today. This is a double whammy. So now we're gonna create a new flow to generate an image. So now let's create a second flow for the image generation. I'll call this 1 generate image. It will be a manual flow trigger again on articles on the item page, except this time, we're going to ask the user for an image prompt. So we will require a confirmation which will pop up a model you'll see in a moment. So I'll call this confirm, and we can provide any number of keys. We just want one prompt. Of course, you might choose to make this a little more descriptive to users, but we're just gonna add an input with the value well, with the name of prompt. So what we're gonna do actually, once again, let's see how this works. So let's go back to our item. Let's refresh. And inside of flows, there's now 2. Here's generate image, and here's our prompt. So I will call this lemon herb roasted chicken in a baking dish, and I'm sure you could create something a little more creative. Let's run the flow. Let's refresh our flow and take a look at where that prompt has gone. And just like before, inside of our body, we have an array of keys. So that's the ID of the article, but we also have this prompt here. So we're gonna use that and immediately go off to DALL E, the image generation endpoint, and we will, and we'll generate an image using that prompt. So we'll call this one generate. We'll make a web request again. It will be a post request to this URL. Once again, we need to validate ourselves, so we will add the same header as before, authorization bearer space token, and then we have the request body. This is what the request body looks like. We pass in the prompt. We specify the size from 1 of the provided allowed sizes. With DALL E 3 at the moment, you can only generate 1 variation at a time. But if you use other DALL E models, you might be able to generate multiple. We'll just use 1, and that's the name of the model. So let's hit save once again, and we will rerun this flow. We see that's taking a little longer because now it's actually going off and generating an image for us we'll wait for that to be complete here We'll go back to the flow. We'll refresh to look at the new log, and we'll see here that in the data that is returned is an array called data with a revised prompt and a URL. Now we want to import this image via URL into our directors project. Now flows don't provide a way to do that out of the box. So I'm actually using this lovely community, community extension, which does allow us to import images from external URLs. So I will link this along with the video, but I've already gone ahead and made that extension available in this director's project. So let's add a new operation here. We will use this file import. I think I'll call this one import, and we provide the URL. So that came from the step that we could generate dot data dot data and dot URL like so And what file import will do is return just a string, and the string is the ID of the newly imported file in Directus. So instead of demoing it at this point, we're just gonna go ahead straight away and include that image inside of our post. So update data once again, very similar to before, articles, full access. The ID again will be trigger dot Bobby dot keys 0, and the payload will be image, and we just want that value there. So import because it doesn't return an object. It just returns a string. So let's hit save, and that should be everything we need. So we have lemon herb roasted chicken here. So let's go ahead and generate the image. Let me get my prompt back. We'll run flow on the current item. We'll give that a few moments to complete. It should go off, generate the image, import it to direct us, and then update this item with that newly imported file. So let's see if that works. There we go. Fantastic. Hurrah. So in this episode of QuickConnect a little bit longer, we've done 2 things with the open API the open AI APIs. We have created a flow which on demand can create a social post. And the nice thing is if you don't like it or you don't like the image, you can just press the buttons again, and it will go ahead and regenerate them. And the second one generates an image with the DALL E, model. It imports it into Directus using that extension and then includes it here inside of the item. I hope you found this super interesting. I had a lot of fun putting this one together, and go forth and have fun. See you next time.",[231],"be12dd08-61b5-4003-9279-72cebbbf494f",[],{"id":140,"number":141,"show":122,"year":142,"episodes":234},[144,145,146,147,148,149,150,151],{"id":148,"slug":236,"vimeo_id":237,"description":238,"tile":239,"length":240,"resources":241,"people":245,"episode_number":247,"published":248,"title":249,"video_transcript_html":250,"video_transcript_text":251,"content":8,"seo":8,"status":130,"episode_people":252,"recommendations":254,"season":255},"clarifai","898557806","Automatically tag new image files with Clarifai's Image Recognition Model.","60c0e3e7-7141-4121-afd9-fa25e460d161",10,[242],{"name":243,"url":244},"Clarifai Image API Reference","https://docs.clarifai.com/api-guide/predict/images/",[246],{"name":164,"url":165},5,"2024-01-26","Tag Images Automatically with Clarifai","\u003Cp>Speaker 0: Clarify allow you to train and use machine learning models via APIs. Today in QuickConnect, we're going to integrate clarify with directors using directors flows. So whenever we upload a new image to our directors project, we will go off to clarify, understand what's in the image, and update the tags in an image accordingly. Here's an image uploaded in my director's project, and we see here that there are tags. So what we'll be doing is populating this with what we believe is in the image based on a Clarify output.\u003C/p>\u003Cp>So with that, let's get started. When you sign up for a Clarify account, they'll prompt you to create a project, which is totally fine. Projects contain individual permissions and API keys, which you can manage within the scope of the project. Now for today's, today's example, we're actually just going to use the account level personal access token. So even if you don't have a project, head over to your user settings to the security section and copy this personal access token here containing all scopes.\u003C/p>\u003Cp>Just a reminder, other people shouldn't see this key. And if people do, you can go ahead and delete it and create a brand new one. So that's what we need to do here. Let's head over to our director's project and set this up. In your directors project, create a brand new flow.\u003C/p>\u003Cp>I'm going to call this one tag images with clarify. It's going to be a non blocking event hook with a scope of files dot upload, which means this flow will automatically be triggered whenever a new file is uploaded. Let's actually test that out. Let's upload this nice image of a curry that I had recently, and we'll head back to our flow here and refresh. And we should see over in logs that it has been run one time.\u003C/p>\u003Cp>Now inside of the payload, we see that we have this object. The object has a key. So this is a unique identifier for this file within the director's files collection. And there is this payload object with a bunch of properties, including a type. Now this is important because this flow will actually run when we upload any file regardless of file type.\u003C/p>\u003Cp>So the first thing we're gonna wanna do is just filter this down to make sure this flow only continues if the file is an image. So let's create a new condition. I'll call this one is image, and I'll use the following condition rule, which just means that the type has to contain the word image. That means it doesn't matter if it's a JPEG, a PNG, a webp file, a GIF, whatever, as as long as it contains the word image inside of the file type, which it will for any of those types, we can continue. Now we're gonna go ahead and actually call the clarify API.\u003C/p>\u003Cp>So let's create a new operation, and we'll call this one clarify. We'll be making a web request. It will be a post request to this specific URL provided by the Clarify documentation. This is a built in model that they provide called general image recognition, and we're gonna be using this specific trained version of the model. Once again, I've grabbed that straight from their documentation.\u003C/p>\u003Cp>Now we need to authenticate, but it is us. So authorize station authorization. Yeah. Key and then our personal access token. Finally, we need to actually provide the URL of the image we want it to interpret.\u003C/p>\u003Cp>It's a little bit of a convoluted object here. So I've copied and pasted our starter. But what's important is that every image has a direct URL that we can provide to it from the director's project. That is the full URL of your project slash assets and then slash the actual ID or key of that file. And we've already seen that during the trigger.\u003C/p>\u003Cp>It's called trigger dot key. Now this will work if the file permissions are public, but if they're not, we need to provide a way for Clarify to actually access this image. And we can do that by adding a query parameter called access token and then an access token of a user that has access to that image. So let's hit save, and let's try running that again. So let's just delete this image, and we will reupload it.\u003C/p>\u003Cp>We'll head back to our flow. We'll refresh it, and we will see the output of the second invocation of this flow. So if we head to the payload here, we can see a a ton of information about the model itself. What else have we got here? The input image that we provided here, and then all of these concepts, food, meal, dinner, no person, vegetable grow, and so on.\u003C/p>\u003Cp>Now every one of these also has a value between 0 and 1, which is the confidence that it is correct. Now what we wanna do here is we want to end up to create our tags, filtering out any that have 2 lower value and then creating just an array of strings. So the next step here is to create a an operation called transcript. Here we go. So I'll call this one concepts.\u003C/p>\u003Cp>Now that was a big object. I've obviously done a little bit of prep work, so we don't have to go, you know, traversing through this object. I know that the data we want is inside of data dot clarify because that is the name of this step that's hidden by this pane here. Clarify dot outputs, that is an array. We just want the first one.\u003C/p>\u003Cp>Dot data dot concepts. And each one of those is one of those objects that contains the name of the of the concept and the score and so on. So we'll return this, but we wanna do a little bit more work to it. Firstly, we wanna go ahead and filter. We only want concepts where the score or I think they call it the value, so where the concept dot value is greater than 0.95.\u003C/p>\u003Cp>And depending on your use case, you can decide what, what's the correct value is for you. Then we want to go ahead and map it. So we only get come on, Kevin, you can type. So we only get the name. So let's save this.\u003C/p>\u003Cp>Let's run this again, and we'll see the output of this concept script. And what we should get is just an array of strings of the concepts, which have a score of greater than 0.95. Let's just double check that that is correct here. So let's reupload the picture of our food. Let's refresh this and we should see another invocation here yes Oh, that was a little error there.\u003C/p>\u003Cp>So let's head back and figure out what's gone wrong here. Clarify dot outputs. Ah, I don't think this is right. I think it was actually nested inside of the data, the data value. Let's try that again.\u003C/p>\u003Cp>Let's delete this, re upload it refresh There we go. So there's our array of strings, which is fantastic. And this just leaves us with one final step, which is to actually update the tags of that of that newly uploaded images with those values. So let's go ahead and update image. Let's update data.\u003C/p>\u003Cp>This is gonna be inside of the Directus files collection. We'll give it full access. We only want to upload the IDs of the images which match the key of the starting image. It'll only update 1. And finally, the payload here is going to be tags, concepts like so.\u003C/p>\u003Cp>And we can do that because context was an array that was returned. So let's hit save. Let's save this off, and let's give this one final go. We'll delete this image. We will re upload it.\u003C/p>\u003Cp>And in theory, if I open it now, we should see the image is tagged, which you can then use in API requests in order to build applications on top of this data. So I hope you found this interesting. Clarify have a ton of trained models, so we can actually do a lot more with it. But in this small example, you can see how to get started, explore the Clarify documentation, and do even more. Until the next episode, see you later.\u003C/p>","Clarify allow you to train and use machine learning models via APIs. Today in QuickConnect, we're going to integrate clarify with directors using directors flows. So whenever we upload a new image to our directors project, we will go off to clarify, understand what's in the image, and update the tags in an image accordingly. Here's an image uploaded in my director's project, and we see here that there are tags. So what we'll be doing is populating this with what we believe is in the image based on a Clarify output. So with that, let's get started. When you sign up for a Clarify account, they'll prompt you to create a project, which is totally fine. Projects contain individual permissions and API keys, which you can manage within the scope of the project. Now for today's, today's example, we're actually just going to use the account level personal access token. So even if you don't have a project, head over to your user settings to the security section and copy this personal access token here containing all scopes. Just a reminder, other people shouldn't see this key. And if people do, you can go ahead and delete it and create a brand new one. So that's what we need to do here. Let's head over to our director's project and set this up. In your directors project, create a brand new flow. I'm going to call this one tag images with clarify. It's going to be a non blocking event hook with a scope of files dot upload, which means this flow will automatically be triggered whenever a new file is uploaded. Let's actually test that out. Let's upload this nice image of a curry that I had recently, and we'll head back to our flow here and refresh. And we should see over in logs that it has been run one time. Now inside of the payload, we see that we have this object. The object has a key. So this is a unique identifier for this file within the director's files collection. And there is this payload object with a bunch of properties, including a type. Now this is important because this flow will actually run when we upload any file regardless of file type. So the first thing we're gonna wanna do is just filter this down to make sure this flow only continues if the file is an image. So let's create a new condition. I'll call this one is image, and I'll use the following condition rule, which just means that the type has to contain the word image. That means it doesn't matter if it's a JPEG, a PNG, a webp file, a GIF, whatever, as as long as it contains the word image inside of the file type, which it will for any of those types, we can continue. Now we're gonna go ahead and actually call the clarify API. So let's create a new operation, and we'll call this one clarify. We'll be making a web request. It will be a post request to this specific URL provided by the Clarify documentation. This is a built in model that they provide called general image recognition, and we're gonna be using this specific trained version of the model. Once again, I've grabbed that straight from their documentation. Now we need to authenticate, but it is us. So authorize station authorization. Yeah. Key and then our personal access token. Finally, we need to actually provide the URL of the image we want it to interpret. It's a little bit of a convoluted object here. So I've copied and pasted our starter. But what's important is that every image has a direct URL that we can provide to it from the director's project. That is the full URL of your project slash assets and then slash the actual ID or key of that file. And we've already seen that during the trigger. It's called trigger dot key. Now this will work if the file permissions are public, but if they're not, we need to provide a way for Clarify to actually access this image. And we can do that by adding a query parameter called access token and then an access token of a user that has access to that image. So let's hit save, and let's try running that again. So let's just delete this image, and we will reupload it. We'll head back to our flow. We'll refresh it, and we will see the output of the second invocation of this flow. So if we head to the payload here, we can see a a ton of information about the model itself. What else have we got here? The input image that we provided here, and then all of these concepts, food, meal, dinner, no person, vegetable grow, and so on. Now every one of these also has a value between 0 and 1, which is the confidence that it is correct. Now what we wanna do here is we want to end up to create our tags, filtering out any that have 2 lower value and then creating just an array of strings. So the next step here is to create a an operation called transcript. Here we go. So I'll call this one concepts. Now that was a big object. I've obviously done a little bit of prep work, so we don't have to go, you know, traversing through this object. I know that the data we want is inside of data dot clarify because that is the name of this step that's hidden by this pane here. Clarify dot outputs, that is an array. We just want the first one. Dot data dot concepts. And each one of those is one of those objects that contains the name of the of the concept and the score and so on. So we'll return this, but we wanna do a little bit more work to it. Firstly, we wanna go ahead and filter. We only want concepts where the score or I think they call it the value, so where the concept dot value is greater than 0.95. And depending on your use case, you can decide what, what's the correct value is for you. Then we want to go ahead and map it. So we only get come on, Kevin, you can type. So we only get the name. So let's save this. Let's run this again, and we'll see the output of this concept script. And what we should get is just an array of strings of the concepts, which have a score of greater than 0.95. Let's just double check that that is correct here. So let's reupload the picture of our food. Let's refresh this and we should see another invocation here yes Oh, that was a little error there. So let's head back and figure out what's gone wrong here. Clarify dot outputs. Ah, I don't think this is right. I think it was actually nested inside of the data, the data value. Let's try that again. Let's delete this, re upload it refresh There we go. So there's our array of strings, which is fantastic. And this just leaves us with one final step, which is to actually update the tags of that of that newly uploaded images with those values. So let's go ahead and update image. Let's update data. This is gonna be inside of the Directus files collection. We'll give it full access. We only want to upload the IDs of the images which match the key of the starting image. It'll only update 1. And finally, the payload here is going to be tags, concepts like so. And we can do that because context was an array that was returned. So let's hit save. Let's save this off, and let's give this one final go. We'll delete this image. We will re upload it. And in theory, if I open it now, we should see the image is tagged, which you can then use in API requests in order to build applications on top of this data. So I hope you found this interesting. Clarify have a ton of trained models, so we can actually do a lot more with it. But in this small example, you can see how to get started, explore the Clarify documentation, and do even more. Until the next episode, see you later.",[253],"b61f3dba-88a3-42de-9269-6d933731b5ac",[],{"id":140,"number":141,"show":122,"year":142,"episodes":256},[144,145,146,147,148,149,150,151],{"id":149,"slug":258,"vimeo_id":259,"description":260,"tile":261,"length":180,"resources":8,"people":262,"episode_number":264,"published":265,"title":266,"video_transcript_html":267,"video_transcript_text":268,"content":8,"seo":8,"status":130,"episode_people":269,"recommendations":271,"season":272},"netlify","899795774","Increase visibility of build status by showing them in Directus.","ac4ea98a-a6ac-40c7-8c36-89c41992f27d",[263],{"name":164,"url":165},6,"2024-02-02","Show Build Status with Netlify","\u003Cp>Speaker 0: Netlify is a platform that lets you build and deploy web applications. So it will take code, it will run any build scripts that are required, and then it will deploy the output of that build script. Now this episode of quick connect is all about increasing visibility into a specific deploy, so people know whether it has been successful or not directly from within directors. So people who author content or author updates to content can see when that content has been successfully deployed or perhaps when it hasn't because there's been a problem, and they know that there's some intervention needed. So let's get started.\u003C/p>\u003Cp>The first thing we're going to do is head over to flows, and we're going to create a new flow. I'll call this deploy updates, And we're gonna make this a webhook trigger with a post method. Once we create this flow, we are now given a URL. Whenever this URL receives a post request, the flow will begin. So now we're gonna head over to Netlify, and I've set up this little project here.\u003C/p>\u003Cp>Very minimal, but it does have a build step, which is important. And we're going to create a new deploy notification, which is an HTTP post request. And in here, we're gonna put the URL that we have just generated from our flow. Now when it comes to which event to listen for, we are going to pick deploy started. So this URL will receive an update whenever a deploy has started.\u003C/p>\u003Cp>We'll hit save. While we're here, we're also going to immediately add a second post request for when a deploy has succeeded, and we're gonna send that to exactly the same URL. We're just gonna do when they've started and succeeded today, but in reality, you'll want to set up a third one for when something has not succeeded, when there's been an error. This is actually all we need to do right here, right now. Let's head over to Directus.\u003C/p>\u003Cp>Actually, no. Let's not head over to Directus. Let's trigger a deploy and see what happens. So let's deploy our website. And what Netlify is now doing is it's going to grab the code.\u003C/p>\u003Cp>It's going to grab all of the dependencies, then it's going to execute the build script that I have provided, which should take almost no time whatsoever again because it's a very minimal site here. There we go. They'll all fill in, and now the site is live. So let's refresh our flow here. And I'm not sure what you might be expecting, but there are actually 2 invocations of this flow.\u003C/p>\u003Cp>The first one, if we look at the payload, has some interesting information. Firstly, it has a build ID. So this is for any given deploy, this build ID will stay the same. It has the name of the website, which is also useful because one Netlify account, could be hosting multiple websites, and you may want this flow for multiple sites. For example, at Directus, we have a website, and we have the dots, and they are both deployed by Netlify.\u003C/p>\u003Cp>And we may wanna reuse this flow to share the logic. We also have, what else is interesting here? The state. So at the beginning, a state is always building. That is the first step pair after initialization.\u003C/p>\u003Cp>Now if we look at the second invocation, it looks almost identical, except the state is now ready because the build succeeded. The state could also not be ready. It could indicate there's an error, should there be 1. But, otherwise, it's the same. The build ID remains the same.\u003C/p>\u003Cp>The name of the site stays the same. So first thing we're gonna do here is actually first thing we're gonna do is determine whether or not this is a brand new item or whether this is an update for an existing deploy. So what we'll do is we'll create a condition. I will put in this little default here, except I'm gonna change the values. So it's going to be Bobby.\u003C/p>\u003Cp>Inside of the body, there was a state, and we want to check whether that state is equal to, oops, building. Oh, building. Like so. If it is equal to building, that means this is a brand new entry. Otherwise, it means it's an update, and either there's been a success or a failure.\u003C/p>\u003Cp>Now in the case that this is successful, we want to store this new deploy in directors. So let's set up a collection for this. I'll call it deploys, and I'm going to make the primary key build underscore ID and make that a manually entered string. There are loads of values we can add here. I think what's useful for now is date created and date updated.\u003C/p>\u003Cp>Going into the data model, we want to add a state. You could, of course, chain covers as displayed. I'm just gonna make it a text input for now to keep it easy. The name of the website itself. And I think that's all we need, the build ID, the name of the site, and the state.\u003C/p>\u003Cp>Fantastic. So now let's go back to flows. And let's say when this is a brand new item, I e, building is true, we're going to create a new item in the deploys collection. We'll give it full access, and we're going to save 3 keys. Firstly, the build ID, which is required, is going to be equal to trigger dot body dot build underscore ID.\u003C/p>\u003Cp>Next was the state, and finally, the site. The order doesn't really matter here they'll all get saved in the correct fields. Great. I'm gonna hit save. I'm just gonna double check.\u003C/p>\u003Cp>Was it called site? No. It's called name. So, I will just go and edit that. I had a had a feeling it was wrong.\u003C/p>\u003Cp>So that's name, and the value is actually also name in the payload. So let's hit save, and let's retrigger a deploy. We'll just have to wait a moment for that deploy to start and then complete. And what we should see is a new item is created in the collection when it is first when the build first starts, And nothing should currently happen when we get the updated, when we get the update saying that it has been successful. So we see successful successful.\u003C/p>\u003Cp>Refresh here, and we should see 4 logs. Correct. The first one has been created in the collection. We see there. Quick connect demo building, and there's the build ID in Netlify.\u003C/p>\u003Cp>Now the latest one has a validation error because it was false, which means nothing then happened, which is fantastic. The last thing to really do here is on the reject route of the condition is we're gonna update data. We know it already exists because the first, the first payload we get for each deploy creates the item. Now we just get to update it, which is nice. So what we're gonna do here is we're going to update trigger dot Bobby dot build ID because we know that that is the unique identifier for the item in the collection.\u003C/p>\u003Cp>And all we're gonna do here is update state. Oops. So that's going to be, the trigger dot body dot state, which could be success already, or it could be something else. So let's hit save. Let's save again.\u003C/p>\u003Cp>Let's retrigger that deploy one more time. And you can imagine that this deploy isn't triggered manually. It's triggered because a user has updated something indirect or for some other reason, there is a a rebuild of the application that's taking place. So it's going ahead. It's building it.\u003C/p>\u003Cp>It's going through all those steps. If we look over here, we now see building. If I refresh again, still building. If I refresh again, still building. What's happened there?\u003C/p>\u003Cp>Let's take a look. Oh, you don't have permission to access this. This had incorrect permissions. This wants to be full access. Fantastic.\u003C/p>\u003Cp>So let's go, and we will now trigger that deploy one more time just to validate everything is set up correctly. So we should expect this to take 10 ish seconds, 10, 15 seconds like it has in the past. Basically, the moment it hits that build step is pretty instant just because of this specific applications. But builds can take, you know, 10 minutes sometimes, if necessary. So now if we look at this collection, we see that it's ready.\u003C/p>\u003Cp>So that's the 3rd deploy. We did one at the very start. We did one just now to test it, and that was a problem. And now that we see it's ready. So that is a little bit of, of information on how you can connect Netlify.\u003C/p>\u003Cp>We can also see date created, date updated. You know, we can sort by date created, date updated, whatever. And then, you can use this to find the deploy that's just happened and, share that with your team. Hope you found this in this episode of quick connect interesting. This general setup will work across hosting platforms.\u003C/p>\u003Cp>The Netlify is the one we use, so it's the one that I wanted to show off. Until next time, have a fantastic day, and I'll see you in the next episode.\u003C/p>","Netlify is a platform that lets you build and deploy web applications. So it will take code, it will run any build scripts that are required, and then it will deploy the output of that build script. Now this episode of quick connect is all about increasing visibility into a specific deploy, so people know whether it has been successful or not directly from within directors. So people who author content or author updates to content can see when that content has been successfully deployed or perhaps when it hasn't because there's been a problem, and they know that there's some intervention needed. So let's get started. The first thing we're going to do is head over to flows, and we're going to create a new flow. I'll call this deploy updates, And we're gonna make this a webhook trigger with a post method. Once we create this flow, we are now given a URL. Whenever this URL receives a post request, the flow will begin. So now we're gonna head over to Netlify, and I've set up this little project here. Very minimal, but it does have a build step, which is important. And we're going to create a new deploy notification, which is an HTTP post request. And in here, we're gonna put the URL that we have just generated from our flow. Now when it comes to which event to listen for, we are going to pick deploy started. So this URL will receive an update whenever a deploy has started. We'll hit save. While we're here, we're also going to immediately add a second post request for when a deploy has succeeded, and we're gonna send that to exactly the same URL. We're just gonna do when they've started and succeeded today, but in reality, you'll want to set up a third one for when something has not succeeded, when there's been an error. This is actually all we need to do right here, right now. Let's head over to Directus. Actually, no. Let's not head over to Directus. Let's trigger a deploy and see what happens. So let's deploy our website. And what Netlify is now doing is it's going to grab the code. It's going to grab all of the dependencies, then it's going to execute the build script that I have provided, which should take almost no time whatsoever again because it's a very minimal site here. There we go. They'll all fill in, and now the site is live. So let's refresh our flow here. And I'm not sure what you might be expecting, but there are actually 2 invocations of this flow. The first one, if we look at the payload, has some interesting information. Firstly, it has a build ID. So this is for any given deploy, this build ID will stay the same. It has the name of the website, which is also useful because one Netlify account, could be hosting multiple websites, and you may want this flow for multiple sites. For example, at Directus, we have a website, and we have the dots, and they are both deployed by Netlify. And we may wanna reuse this flow to share the logic. We also have, what else is interesting here? The state. So at the beginning, a state is always building. That is the first step pair after initialization. Now if we look at the second invocation, it looks almost identical, except the state is now ready because the build succeeded. The state could also not be ready. It could indicate there's an error, should there be 1. But, otherwise, it's the same. The build ID remains the same. The name of the site stays the same. So first thing we're gonna do here is actually first thing we're gonna do is determine whether or not this is a brand new item or whether this is an update for an existing deploy. So what we'll do is we'll create a condition. I will put in this little default here, except I'm gonna change the values. So it's going to be Bobby. Inside of the body, there was a state, and we want to check whether that state is equal to, oops, building. Oh, building. Like so. If it is equal to building, that means this is a brand new entry. Otherwise, it means it's an update, and either there's been a success or a failure. Now in the case that this is successful, we want to store this new deploy in directors. So let's set up a collection for this. I'll call it deploys, and I'm going to make the primary key build underscore ID and make that a manually entered string. There are loads of values we can add here. I think what's useful for now is date created and date updated. Going into the data model, we want to add a state. You could, of course, chain covers as displayed. I'm just gonna make it a text input for now to keep it easy. The name of the website itself. And I think that's all we need, the build ID, the name of the site, and the state. Fantastic. So now let's go back to flows. And let's say when this is a brand new item, I e, building is true, we're going to create a new item in the deploys collection. We'll give it full access, and we're going to save 3 keys. Firstly, the build ID, which is required, is going to be equal to trigger dot body dot build underscore ID. Next was the state, and finally, the site. The order doesn't really matter here they'll all get saved in the correct fields. Great. I'm gonna hit save. I'm just gonna double check. Was it called site? No. It's called name. So, I will just go and edit that. I had a had a feeling it was wrong. So that's name, and the value is actually also name in the payload. So let's hit save, and let's retrigger a deploy. We'll just have to wait a moment for that deploy to start and then complete. And what we should see is a new item is created in the collection when it is first when the build first starts, And nothing should currently happen when we get the updated, when we get the update saying that it has been successful. So we see successful successful. Refresh here, and we should see 4 logs. Correct. The first one has been created in the collection. We see there. Quick connect demo building, and there's the build ID in Netlify. Now the latest one has a validation error because it was false, which means nothing then happened, which is fantastic. The last thing to really do here is on the reject route of the condition is we're gonna update data. We know it already exists because the first, the first payload we get for each deploy creates the item. Now we just get to update it, which is nice. So what we're gonna do here is we're going to update trigger dot Bobby dot build ID because we know that that is the unique identifier for the item in the collection. And all we're gonna do here is update state. Oops. So that's going to be, the trigger dot body dot state, which could be success already, or it could be something else. So let's hit save. Let's save again. Let's retrigger that deploy one more time. And you can imagine that this deploy isn't triggered manually. It's triggered because a user has updated something indirect or for some other reason, there is a a rebuild of the application that's taking place. So it's going ahead. It's building it. It's going through all those steps. If we look over here, we now see building. If I refresh again, still building. If I refresh again, still building. What's happened there? Let's take a look. Oh, you don't have permission to access this. This had incorrect permissions. This wants to be full access. Fantastic. So let's go, and we will now trigger that deploy one more time just to validate everything is set up correctly. So we should expect this to take 10 ish seconds, 10, 15 seconds like it has in the past. Basically, the moment it hits that build step is pretty instant just because of this specific applications. But builds can take, you know, 10 minutes sometimes, if necessary. So now if we look at this collection, we see that it's ready. So that's the 3rd deploy. We did one at the very start. We did one just now to test it, and that was a problem. And now that we see it's ready. So that is a little bit of, of information on how you can connect Netlify. We can also see date created, date updated. You know, we can sort by date created, date updated, whatever. And then, you can use this to find the deploy that's just happened and, share that with your team. Hope you found this in this episode of quick connect interesting. This general setup will work across hosting platforms. The Netlify is the one we use, so it's the one that I wanted to show off. Until next time, have a fantastic day, and I'll see you in the next episode.",[270],"3585c63c-5d15-4bd8-a0d6-78c536d31dc7",[],{"id":140,"number":141,"show":122,"year":142,"episodes":273},[144,145,146,147,148,149,150,151],{"id":150,"slug":275,"vimeo_id":276,"description":277,"tile":278,"length":180,"resources":8,"people":279,"episode_number":131,"published":281,"title":282,"video_transcript_html":283,"video_transcript_text":284,"content":8,"seo":8,"status":130,"episode_people":285,"recommendations":287,"season":288},"twilio","899797484","Use Twilio's Lookup API to automatically verify numbers submitted by new users.","2cae565a-c475-4c5f-938c-13bc74c79696",[280],{"name":164,"url":165},"2024-02-09","Validate Phone Numbers with Twilio","\u003Cp>Speaker 0: Twilio provide a lookup API, which allows you to validate a phone number against a set of different criteria. And today in quick connect, we're going to use it to validate that a new user's phone number is a mobile number. If it is, that user can be created. And if it isn't, then we will block creation until they provide a mobile number. So with that, let's get started.\u003C/p>\u003Cp>In your Twilio console, take note of your account SID and auth token. Make sure to keep these secret. We'll be using them later. Before starting this recording, I also added a phone string field, to our direct to c users collection. So on top of the additional kind of required fields, we now also have a phone number being stored.\u003C/p>\u003Cp>So let's get started with our flow. Inside of directors, create a new flow. I'm going to call this one validate phone is mobile. We're going to make this an event hook, but, critically, we're going to make it a blocking hook, which means all of the logic of this flow will be executed before the database transaction is made, meaning that if we create some form of error inside of the flow, the user will not be created. Inside of collections, make sure direct to users is ticked and, and that the scope is items dot create.\u003C/p>\u003Cp>You may additionally choose to run this logic on items dot update, but for now, we'll stick with create. So once a user is created, let's actually do that. All we're gonna do is add a phone number here. Here we go. We'll hit check, and we'll refresh just to see what that data looks like.\u003C/p>\u003Cp>We see that inside of payload are all of the fields that had data entered into them, including, and in this case, only phone. So there's our phone number there. Twilio also provides some tools to help you format phone numbers that may be formatted inconsistently, but that's outside of the scope of this episode. So we are formatting this as required by Twilio with the plus, the country code, and then the phone number itself. Let's go and do a lookup.\u003C/p>\u003Cp>So we'll call this lookup lookup And we're going to do this as a webhook request URL, a get request to this URL which will look up phone numbers And then in here, we're going to insert the phone number of the new user. So trigger dot payload dot phone. We will get some data back if we stop here, but we're additionally going to add the line type intelligence data package, I think they called them, which will provide more information about the number, including what we need. So field capital f equals line type intelligence. Fantastic.\u003C/p>\u003Cp>Now we need to authenticate ourselves with our SID and our auth token authorization now authorization. Now this is important because what Twilio require is what is known as basic h HTTP auth and flows only, only support adding headers. So what we're gonna do here is use the word basic, and now we need to turn our SID and token into a value that will be supported in this format. This is how you do it on the terminal. So you start with the first command, you store your Sid.\u003C/p>\u003Cp>So you replace this with your Sid, you replace this with your token, and then you run this command in your terminal with the dollar signs. And what that will do is spit out a string, which you can use. I've done this ahead of time. So heading back to our flow, we provide the output of what was in our terminal for our auth. Let's hit save now, and let's run this to test it.\u003C/p>\u003Cp>So let's once again create a new user. We'll put in a valid phone number. We'll go back to our flow and refresh to see our new log, and we'll look at the latest run. And we see here that inside of data, we have some information like the country call, the calling country code, the country code, the actual two digits for the for the country, the national format, the international format. But what we also have here is the line type intelligence, which was the data packet that we asked for.\u003C/p>\u003Cp>Now for some applications, it's important that the phone number provided is a mobile, isn't a virtual number, and can support SMS, of which type mobile is true. There's a whole list in the Twilio documentation as to which types can exist. But right now, for this example, we're going to check that mobile exists. So what we're gonna do here is check inside of data, line type intelligence type, and make sure that that is mobile. So let's add a condition in here, and what we're gonna do is check that the lookup data Line line type intelligence, like so, is, there was one inside of it, type.\u003C/p>\u003Cp>That's right. Type is equal to mobile. Fantastic. So this is us just traversing that object that Twilio provided back. We'll hit save.\u003C/p>\u003Cp>And so when it's true, we're actually going to add nothing. We're going to say, hey. This flow concludes. There was no error. This was blocking.\u003C/p>\u003Cp>Now it isn't blocking anymore. Go create the user. But in the case that it fails, we want to stop this in its steps. And the way we're gonna do that is we're gonna run a script. It'll not terribly elegant, but we're just gonna throw a new error.\u003C/p>\u003Cp>So throw new error. Incorrect, phone type must be a mobile. And let's save let's try this once more let's create the user and this should be successful Fantastic. And I don't have another phone number to test it with, but that means all of this was successful, and we went out the success path, which then just, resolved. And if that was not true, if this condition validation was not met, we would run the script and throw an error.\u003C/p>\u003Cp>Now there's a lot we can do with the lookup API, actually. You can check which carrier it is. You can check the fraud risk of a phone number. And all of those, basically, here's all the things we can do. The SMS pumping risk, whether or not the number has been reassigned, whether or not there's been a SIM swap, and so on.\u003C/p>\u003Cp>And all you would do is add extra query parameters to the end of the lookup and then change the conditional to check that certain criteria are met. So this was a little simple example of using the Twilio lookup API to validate phone numbers against some property that is provided by Twilio. You can tweak this for your specific use cases and build more secure applications. Hope you enjoyed this episode of quick connect, and I'll see you in the next episode.\u003C/p>","Twilio provide a lookup API, which allows you to validate a phone number against a set of different criteria. And today in quick connect, we're going to use it to validate that a new user's phone number is a mobile number. If it is, that user can be created. And if it isn't, then we will block creation until they provide a mobile number. So with that, let's get started. In your Twilio console, take note of your account SID and auth token. Make sure to keep these secret. We'll be using them later. Before starting this recording, I also added a phone string field, to our direct to c users collection. So on top of the additional kind of required fields, we now also have a phone number being stored. So let's get started with our flow. Inside of directors, create a new flow. I'm going to call this one validate phone is mobile. We're going to make this an event hook, but, critically, we're going to make it a blocking hook, which means all of the logic of this flow will be executed before the database transaction is made, meaning that if we create some form of error inside of the flow, the user will not be created. Inside of collections, make sure direct to users is ticked and, and that the scope is items dot create. You may additionally choose to run this logic on items dot update, but for now, we'll stick with create. So once a user is created, let's actually do that. All we're gonna do is add a phone number here. Here we go. We'll hit check, and we'll refresh just to see what that data looks like. We see that inside of payload are all of the fields that had data entered into them, including, and in this case, only phone. So there's our phone number there. Twilio also provides some tools to help you format phone numbers that may be formatted inconsistently, but that's outside of the scope of this episode. So we are formatting this as required by Twilio with the plus, the country code, and then the phone number itself. Let's go and do a lookup. So we'll call this lookup lookup And we're going to do this as a webhook request URL, a get request to this URL which will look up phone numbers And then in here, we're going to insert the phone number of the new user. So trigger dot payload dot phone. We will get some data back if we stop here, but we're additionally going to add the line type intelligence data package, I think they called them, which will provide more information about the number, including what we need. So field capital f equals line type intelligence. Fantastic. Now we need to authenticate ourselves with our SID and our auth token authorization now authorization. Now this is important because what Twilio require is what is known as basic h HTTP auth and flows only, only support adding headers. So what we're gonna do here is use the word basic, and now we need to turn our SID and token into a value that will be supported in this format. This is how you do it on the terminal. So you start with the first command, you store your Sid. So you replace this with your Sid, you replace this with your token, and then you run this command in your terminal with the dollar signs. And what that will do is spit out a string, which you can use. I've done this ahead of time. So heading back to our flow, we provide the output of what was in our terminal for our auth. Let's hit save now, and let's run this to test it. So let's once again create a new user. We'll put in a valid phone number. We'll go back to our flow and refresh to see our new log, and we'll look at the latest run. And we see here that inside of data, we have some information like the country call, the calling country code, the country code, the actual two digits for the for the country, the national format, the international format. But what we also have here is the line type intelligence, which was the data packet that we asked for. Now for some applications, it's important that the phone number provided is a mobile, isn't a virtual number, and can support SMS, of which type mobile is true. There's a whole list in the Twilio documentation as to which types can exist. But right now, for this example, we're going to check that mobile exists. So what we're gonna do here is check inside of data, line type intelligence type, and make sure that that is mobile. So let's add a condition in here, and what we're gonna do is check that the lookup data Line line type intelligence, like so, is, there was one inside of it, type. That's right. Type is equal to mobile. Fantastic. So this is us just traversing that object that Twilio provided back. We'll hit save. And so when it's true, we're actually going to add nothing. We're going to say, hey. This flow concludes. There was no error. This was blocking. Now it isn't blocking anymore. Go create the user. But in the case that it fails, we want to stop this in its steps. And the way we're gonna do that is we're gonna run a script. It'll not terribly elegant, but we're just gonna throw a new error. So throw new error. Incorrect, phone type must be a mobile. And let's save let's try this once more let's create the user and this should be successful Fantastic. And I don't have another phone number to test it with, but that means all of this was successful, and we went out the success path, which then just, resolved. And if that was not true, if this condition validation was not met, we would run the script and throw an error. Now there's a lot we can do with the lookup API, actually. You can check which carrier it is. You can check the fraud risk of a phone number. And all of those, basically, here's all the things we can do. The SMS pumping risk, whether or not the number has been reassigned, whether or not there's been a SIM swap, and so on. And all you would do is add extra query parameters to the end of the lookup and then change the conditional to check that certain criteria are met. So this was a little simple example of using the Twilio lookup API to validate phone numbers against some property that is provided by Twilio. You can tweak this for your specific use cases and build more secure applications. Hope you enjoyed this episode of quick connect, and I'll see you in the next episode.",[286],"3444e114-c095-4239-8513-7dd6334394e9",[],{"id":140,"number":141,"show":122,"year":142,"episodes":289},[144,145,146,147,148,149,150,151],{"id":151,"slug":291,"vimeo_id":292,"description":293,"tile":294,"length":180,"resources":8,"people":295,"episode_number":201,"published":297,"title":298,"video_transcript_html":299,"video_transcript_text":300,"content":8,"seo":8,"status":130,"episode_people":301,"recommendations":303,"season":304},"github","899796327","Build a Flow to create GitHub issues from issues within Directus collections.","41d69174-d3db-4fd9-b579-f9066dda70df",[296],{"name":164,"url":165},"2024-02-16","Create Issues from Directus with GitHub","\u003Cp>Speaker 0: At the bottom of almost every page in our documentation, we have this little custom built widget, which we use to gather feedback on what's good and what isn't so good and needs improvement. And all of this data ends up in a director's project, which we periodically look through. And then anything that needs actioning, we create issues for in the director's repository. Now, in today's episode of quick connect, we're going to build a little connection to GitHub that automates the act of adding of taking feedback in direct us and turning it into issues. So I'm really excited to show this to you.\u003C/p>\u003Cp>I've created this little sample repository. So there's my username, phase on overload slash sample repo. I'm pointing that out because it's included a little later on. In this little sample directors project I have, I've set up the following data model. The idea is that content is the user's actual feedback that they type, and then we will add a button to the side of the item.\u003C/p>\u003Cp>And then when we click on it, we will add an issue to that repository automatically after gathering a little actionable summary manually. And then we will populate it here. We'll go off to GitHub, make the issue, and return the issue number here so we can click right through to it. And the only thing we need for today other than, of course, the director's project is a personal access token from GitHub. There are loads of ways you can authenticate.\u003C/p>\u003Cp>We're gonna use these fine grained personal access tokens. You provide the access to the repository, and the only permission we require is read and write on issues because that's all this token is going to do. I've done this ahead of time. So with that, I think we're ready to get started. So over in Directus, let's create our flow.\u003C/p>\u003Cp>We're going to call this one add feedback item to GitHub. And in the trigger setup, we're gonna make this a manual trigger on the feedback collection, only on the item pages. Now there's a difference between a piece of feedback and the remedial action, and issues describe the remedial action that needs to take place. So we're gonna use a confirmation dialogue here. We're going to add 1 input.\u003C/p>\u003Cp>I'm going to call this input summary or maybe title. Yeah. Title. That will just be a string input and save. So now if we go to an item, we see here over on the side, there is this flow, add feedback item to GitHub.\u003C/p>\u003Cp>You click it, and it pops up this box where we can gather a title. For example, if we go find this very valid piece of feedback, which we got recently, I can add item feedback to GitHub. The issue is there are no docs on connecting an existing database. So the remedial action would be add docs on connecting existing databases. Right?\u003C/p>\u003Cp>Run flow on current item. Let's go to our flow and refresh it, and we'll see the one invocation of this flow. We see here that inside of the body, we have the title, which is what we entered into that box just now. And we have the key, the unique identifier for that piece of feedback in the director's collection. Now what isn't visible here is the actual feedback itself, which is important.\u003C/p>\u003Cp>So what we need to now do is go from the just having this key to having the entire feedback object. We'll do that with a read data, read data, operation. I think we'll call this one feedback. We'll give it full permissions from the feedback collection, and we want to access trigger dot body dot keys 0. So that's that ID.\u003C/p>\u003Cp>If we hit save now, this feedback operation will return the full object, including the including the content itself. Let's just quickly test that. Click this button. We'll paste in the title. We'll refresh the flow.\u003C/p>\u003Cp>So we get the new log. Let me see in the second payload, we get the content here. That'll be important to add into the context of the issue. Now we're actually gonna go off to GitHub, and we're gonna add this issue to our repository. So we'll call this 1 GitHub, and we'll make a web request.\u003C/p>\u003Cp>This should be a post request to the following API endpoint. /repos/username slash repository slash issues. And what we need to do is add a header to identify ourselves. This is where that personal access token is gonna come in. Authorization bearer space personal access token.\u003C/p>\u003Cp>Save. And now in the request body, we can add some additional, information for the issue. Now there's quite a lot we can add here. I think we're gonna keep it quite minimal for the sake of this episode of quick connect. We will add a title.\u003C/p>\u003Cp>And what we'll do for the title is we will use that prompt that we entered. So trigger dot body dot title. And let's actually add the users, let's add the users feedback as written in the body in the actual, like, comments that goes with the PR. Let's make it a quote so it's clear. This is marked down now, and we want feedback dot content.\u003C/p>\u003Cp>Let's just make this really clear. We'll add a couple of new lines, and we'll say this feedback was provided, via the docs feedback widget. Just so it's clear that this is a quote and where that quote has come from. Let's, let's remove that with underscore in the key and hit save. Let's save that.\u003C/p>\u003Cp>Let's add this feedback again, and let's run flow on current item. Let's go back to our issues for this repository and hit refresh, and we see that the new item has been successfully created. And in there, there's the quote, and then there is the, the additional text. We can do lots when we create an issue, including adding labels, adding to projects, but you can refer to the API documentation, about these extra bits and what's suitable for your use case. Now this, this issue is issue number 7.\u003C/p>\u003Cp>Let's refresh and look at the logs, of that issue. Oh, that's the first time. This is the latest time. And we see this big payload has come back inside of data here. We have the title, which was our little summary that we typed in, and we also have the issue number 7.\u003C/p>\u003Cp>Just before continuing, I just wanna note that inside of these little button links here, I've added a view on GitHub button, which is specifically linked to that repository. So that is the repository name, and it'll append the issue number. So what we wanna store an issue is just the number. Let's add one final operation here. Let's make this an update data, operation to feedback, full access.\u003C/p>\u003Cp>Once again, we're gonna update the trigger dot, trigger dot body dot keys 0. That's it. And the payload that we're gonna add here, what we're actually gonna update with is the summary, which is our remedial action. You could choose to call this action, but, you know, it's called summary here. So that's gonna be the trigger dot body dot title what we entered in that pop up box and the issue number itself.\u003C/p>\u003Cp>GitHub. And the reason we can call it GitHub is that was the name of the operation, which returned the data dot data dot number. So let's hit save. And now we should be able to hang out just in here. Let's, pick another piece of feedback here.\u003C/p>\u003Cp>We will add this feedback to GitHub, clarify extension location, run flow on current item. Oh, we got a little undefined there, so something's wrong. The issue is indeed 8, which is great. There was one issue there. The summary is undefined.\u003C/p>\u003Cp>So let's just dig into that for a moment. The value we want is in payload body title. Let's update this. Trigger 2 g's dot body dot title. That was just a little typo there.\u003C/p>\u003Cp>So let's do this once more. This one's kind of already done. Let's do this one again. Here we are. Oh, not there.\u003C/p>\u003Cp>We'll run the flow. Boom. There we go. So that's the summary we typed in. That's the issue number.\u003C/p>\u003Cp>We click the button. The issue is open, and now there's this lovely link between where feedback is provided and tracking the remedial action. The other nice thing about this, of course, is inside of our items, we can do a filter. So we can say, hey. I only want to show where, you know, the issue is empty.\u003C/p>\u003Cp>IE, these are the items that still require some form of of response if appropriate. So, yeah, this was a huge amount of fun. It was just a little quick project, which I think is gonna actually enrich my day to day life. So I'm quite chuffed about that. Thank you so much for joining me in QuickConnect.\u003C/p>\u003Cp>It's been a pleasure. Have a wonderful day. Bye for now.\u003C/p>","At the bottom of almost every page in our documentation, we have this little custom built widget, which we use to gather feedback on what's good and what isn't so good and needs improvement. And all of this data ends up in a director's project, which we periodically look through. And then anything that needs actioning, we create issues for in the director's repository. Now, in today's episode of quick connect, we're going to build a little connection to GitHub that automates the act of adding of taking feedback in direct us and turning it into issues. So I'm really excited to show this to you. I've created this little sample repository. So there's my username, phase on overload slash sample repo. I'm pointing that out because it's included a little later on. In this little sample directors project I have, I've set up the following data model. The idea is that content is the user's actual feedback that they type, and then we will add a button to the side of the item. And then when we click on it, we will add an issue to that repository automatically after gathering a little actionable summary manually. And then we will populate it here. We'll go off to GitHub, make the issue, and return the issue number here so we can click right through to it. And the only thing we need for today other than, of course, the director's project is a personal access token from GitHub. There are loads of ways you can authenticate. We're gonna use these fine grained personal access tokens. You provide the access to the repository, and the only permission we require is read and write on issues because that's all this token is going to do. I've done this ahead of time. So with that, I think we're ready to get started. So over in Directus, let's create our flow. We're going to call this one add feedback item to GitHub. And in the trigger setup, we're gonna make this a manual trigger on the feedback collection, only on the item pages. Now there's a difference between a piece of feedback and the remedial action, and issues describe the remedial action that needs to take place. So we're gonna use a confirmation dialogue here. We're going to add 1 input. I'm going to call this input summary or maybe title. Yeah. Title. That will just be a string input and save. So now if we go to an item, we see here over on the side, there is this flow, add feedback item to GitHub. You click it, and it pops up this box where we can gather a title. For example, if we go find this very valid piece of feedback, which we got recently, I can add item feedback to GitHub. The issue is there are no docs on connecting an existing database. So the remedial action would be add docs on connecting existing databases. Right? Run flow on current item. Let's go to our flow and refresh it, and we'll see the one invocation of this flow. We see here that inside of the body, we have the title, which is what we entered into that box just now. And we have the key, the unique identifier for that piece of feedback in the director's collection. Now what isn't visible here is the actual feedback itself, which is important. So what we need to now do is go from the just having this key to having the entire feedback object. We'll do that with a read data, read data, operation. I think we'll call this one feedback. We'll give it full permissions from the feedback collection, and we want to access trigger dot body dot keys 0. So that's that ID. If we hit save now, this feedback operation will return the full object, including the including the content itself. Let's just quickly test that. Click this button. We'll paste in the title. We'll refresh the flow. So we get the new log. Let me see in the second payload, we get the content here. That'll be important to add into the context of the issue. Now we're actually gonna go off to GitHub, and we're gonna add this issue to our repository. So we'll call this 1 GitHub, and we'll make a web request. This should be a post request to the following API endpoint. /repos/username slash repository slash issues. And what we need to do is add a header to identify ourselves. This is where that personal access token is gonna come in. Authorization bearer space personal access token. Save. And now in the request body, we can add some additional, information for the issue. Now there's quite a lot we can add here. I think we're gonna keep it quite minimal for the sake of this episode of quick connect. We will add a title. And what we'll do for the title is we will use that prompt that we entered. So trigger dot body dot title. And let's actually add the users, let's add the users feedback as written in the body in the actual, like, comments that goes with the PR. Let's make it a quote so it's clear. This is marked down now, and we want feedback dot content. Let's just make this really clear. We'll add a couple of new lines, and we'll say this feedback was provided, via the docs feedback widget. Just so it's clear that this is a quote and where that quote has come from. Let's, let's remove that with underscore in the key and hit save. Let's save that. Let's add this feedback again, and let's run flow on current item. Let's go back to our issues for this repository and hit refresh, and we see that the new item has been successfully created. And in there, there's the quote, and then there is the, the additional text. We can do lots when we create an issue, including adding labels, adding to projects, but you can refer to the API documentation, about these extra bits and what's suitable for your use case. Now this, this issue is issue number 7. Let's refresh and look at the logs, of that issue. Oh, that's the first time. This is the latest time. And we see this big payload has come back inside of data here. We have the title, which was our little summary that we typed in, and we also have the issue number 7. Just before continuing, I just wanna note that inside of these little button links here, I've added a view on GitHub button, which is specifically linked to that repository. So that is the repository name, and it'll append the issue number. So what we wanna store an issue is just the number. Let's add one final operation here. Let's make this an update data, operation to feedback, full access. Once again, we're gonna update the trigger dot, trigger dot body dot keys 0. That's it. And the payload that we're gonna add here, what we're actually gonna update with is the summary, which is our remedial action. You could choose to call this action, but, you know, it's called summary here. So that's gonna be the trigger dot body dot title what we entered in that pop up box and the issue number itself. GitHub. And the reason we can call it GitHub is that was the name of the operation, which returned the data dot data dot number. So let's hit save. And now we should be able to hang out just in here. Let's, pick another piece of feedback here. We will add this feedback to GitHub, clarify extension location, run flow on current item. Oh, we got a little undefined there, so something's wrong. The issue is indeed 8, which is great. There was one issue there. The summary is undefined. So let's just dig into that for a moment. The value we want is in payload body title. Let's update this. Trigger 2 g's dot body dot title. That was just a little typo there. So let's do this once more. This one's kind of already done. Let's do this one again. Here we are. Oh, not there. We'll run the flow. Boom. There we go. So that's the summary we typed in. That's the issue number. We click the button. The issue is open, and now there's this lovely link between where feedback is provided and tracking the remedial action. The other nice thing about this, of course, is inside of our items, we can do a filter. So we can say, hey. I only want to show where, you know, the issue is empty. IE, these are the items that still require some form of of response if appropriate. So, yeah, this was a huge amount of fun. It was just a little quick project, which I think is gonna actually enrich my day to day life. So I'm quite chuffed about that. Thank you so much for joining me in QuickConnect. It's been a pleasure. Have a wonderful day. Bye for now.",[302],"cc2b4d61-d685-41c2-a1a7-a397140172aa",[],{"id":140,"number":141,"show":122,"year":142,"episodes":305},[144,145,146,147,148,149,150,151],{"id":138,"slug":307,"vimeo_id":308,"description":309,"tile":310,"length":240,"resources":8,"people":8,"episode_number":141,"published":311,"title":309,"video_transcript_html":312,"video_transcript_text":313,"content":8,"seo":314,"status":130,"episode_people":315,"recommendations":317,"season":318},"firecrawl","1026203173","Integrating Firecrawl with Directus","d0a87153-8475-433f-aca0-dea9802caf03","2024-10-16","\u003Cp>Speaker 0: Hello there. I'm really excited about this tutorial. So on Directus TV, we already have a show called Quick Connect, which shows you how to integrate third party services with Directus using Directus Automate and Flows. And in the spirit of that show, today, I'm gonna show you how to integrate FireCrawl with Directus. Now here they say that they turn websites into LLM ready data.\u003C/p>\u003Cp>And what that means in practice is you can feed it a URL, provide some options if you want, and it will go and take a look at that web page and return some structured data for you like so. This is their scrape endpoint, which will take a single web page and scrape some data from it. They also have a couple of other endpoints, crawl and map, but today, we're gonna use scrape. Now I've already logged into FireCrawl Cloud and generated an API key, which I'll copy for later. You can also self host FireCrawl, but for ease, I'm just gonna use their cloud product here.\u003C/p>\u003Cp>Now I have this directors project over here with a new empty collection called companies. In this collection, there are a few fields. URL, a name, a description, mission, and a boolean, a true force value, is it open source. And our goal will be to provide the URL and then have FireCrawl automatically populate the rest with flows. So let's go ahead and create a new flow.\u003C/p>\u003Cp>So this is our automation builder if you've not seen it before. I'll call this one get company data, I guess. And we are going to use a manual trigger, which will add a button to the side of collection and item pages. So we're going to say we'll run this on the company's collection. What else matters here?\u003C/p>\u003Cp>We're going to, not require selection, so the button always works. And we're going to require confirmation, which will pop up a modal. And in that modal, we will just add a URL. We'll make it a string input, and we'll make it full widths. And I think that's all we need to do here.\u003C/p>\u003Cp>So just to see what happens here, if I go back to the, to the company's collection, we now see this button here, this manual flow, trigger. I click that. It pulls up the box, and we'll put in a URL and hit run flow. So now if we go back to our flow, we should immediately see that there is one log. And in here, there is a body, and the URL is the value that we typed.\u003C/p>\u003Cp>Fantastic. Now we need to actually do something with it. So, let's go ahead and add a new operation here. And, honestly, fire crawl is pretty sick. You can just make one web request.\u003C/p>\u003Cp>Let's take a look at their docs. We're gonna use the LLM extract, endpoint here, and let's just take a look at the kind of construction of this API call. It's a post request to this URL. We're gonna pass in our, our API key here as an authorization header, and then they give us this kind of JSON payload here. Here, it's telling it to go ahead and extract specifically these four fields, the company mission, does it support SSO, is it open source, and is it in Y Combinator?\u003C/p>\u003Cp>And it's saying you must go get all four of these. So let's actually just turn this straight into a flow request, request URL, operation. So we're gonna do a post request to this URL, post request to this URL. I'm gonna go and copy my API key again here, and at the end of this I'll, I'll destroy the key. Authorization authorization, bearer API key, save, And then there's the request body.\u003C/p>\u003Cp>And, honestly, it contains a little more than we need, but this contains everything we need. So we'll just pop that in there. The only thing we wanna do, of course, is pass in the URL that we put in the box. So we'll replace this with trigger.body. URL.\u003C/p>\u003Cp>Fantastic. Let's save that and see what happens if we go over to content, press the button, and type in directors.io. We see that's running. That's running. That's a good sign.\u003C/p>\u003Cp>It means it's going off and making the request, waiting for the request. And then we see there is a second log. And we get some data back. There was a 200, so it was successful. Inside of data, there is a property called data, and then there is this value called extract.\u003C/p>\u003Cp>Extract contains all of those custom keys we asked for, company mission, supports SSO, is open source, and is in YC. And then always when you scrape, you get this metadata object, title, description, language, Open Graph data, source URL, and so on. So, really, all we wanna do here now is we wanna take this data and create a new company from it. So let's add a new, let's create a new operation on the resolve path of that web request. Let's call it create data.\u003C/p>\u003Cp>We're gonna create something in the company's collection. We'll give it full access, and then we just need to provide a payload. So let's go ahead and do that. We have an object here. We have a name.\u003C/p>\u003Cp>Oh, we have a name, and we're gonna pass in the value of the last operation dot data.data.metadata dot title. Then I, for one, am I'm just gonna copy this and edit it each time. So we have name, URL. So that's last .data.data.metadata. Source URL.\u003C/p>\u003Cp>We could, of course, just take it from the trigger body URL, but this is properly formatted. You'll notice I typed in directus dot I o, but when it came back in the payload, it came back with the with the, with the protocol HTTPS and so on. So we have name. We have URL. We have a description.\u003C/p>\u003Cp>Now this one is also from the metadata description. We have the mission. Now this was a custom piece of data we asked to be extracted. Company mission is what we called it. And finally, we have open, open underscore source.\u003C/p>\u003Cp>Last data, data extract and then is underscore open underscore source and then remove that trailing comma. So I believe that's the name of all of the fields. We'll figure it out in a moment when when it inevitably doesn't work. We'll hit the button again, directors.io, and hit run flow. Once again, that's going off to fire crawl using their endpoint and there we see there we see it straight here.\u003C/p>\u003Cp>URL name, description, mission, and the boolean is open source. Let's, let's try that once more. Let's go in here and say firefirecrawl.devrun flow. So let's see. And in theory, we should just give that a moment, and there it is.\u003C/p>\u003Cp>So now you can go ahead and grab more data. Now, of course, if we take a look at this endpoint here, you can provide custom properties, and it will try its best to get data out from that. They have a couple of other interesting things which I'll draw your attention to even if I don't think it works in this context. They have extracting without a schema. So this extract here was us creating a a schema.\u003C/p>\u003Cp>Right? You can give it just a text. You can give it a prompt. Extract the company mission from the page. But the thing I don't like about that is you're not explicitly saying what the name of the key is, so you don't necessarily know what it's gonna be at the end.\u003C/p>\u003Cp>I like creating a schema personally. They do something else that's kinda interesting. If I take a look at where is I think it's in their API API reference here inside of scrape. They have this interesting thing called actions. So you can get it to wait, to take a screenshot, to click, write text, press a key, and scroll.\u003C/p>\u003Cp>And the combination of clicking and writing text means you can get it to interact with your web page. You see it here, actions, wait two milliseconds. You could get it to, like, sign into things perhaps, perform search terms. I think it's super interesting. And then take screenshots, of course, and upload those to directors if you fancy.\u003C/p>\u003Cp>So there's a lot of flexibility in this. Having seen kinda how easy this API is, I think I'll go ahead and turn this into, an extension some point in the next few weeks, which we can release as part of Directus AI. But, yeah, that's how to integrate FireCrawl with Directus using Directus Automate. Hope you found this interesting, and by all means, if you have questions, just reach out.\u003C/p>","Hello there. I'm really excited about this tutorial. So on Directus TV, we already have a show called Quick Connect, which shows you how to integrate third party services with Directus using Directus Automate and Flows. And in the spirit of that show, today, I'm gonna show you how to integrate FireCrawl with Directus. Now here they say that they turn websites into LLM ready data. And what that means in practice is you can feed it a URL, provide some options if you want, and it will go and take a look at that web page and return some structured data for you like so. This is their scrape endpoint, which will take a single web page and scrape some data from it. They also have a couple of other endpoints, crawl and map, but today, we're gonna use scrape. Now I've already logged into FireCrawl Cloud and generated an API key, which I'll copy for later. You can also self host FireCrawl, but for ease, I'm just gonna use their cloud product here. Now I have this directors project over here with a new empty collection called companies. In this collection, there are a few fields. URL, a name, a description, mission, and a boolean, a true force value, is it open source. And our goal will be to provide the URL and then have FireCrawl automatically populate the rest with flows. So let's go ahead and create a new flow. So this is our automation builder if you've not seen it before. I'll call this one get company data, I guess. And we are going to use a manual trigger, which will add a button to the side of collection and item pages. So we're going to say we'll run this on the company's collection. What else matters here? We're going to, not require selection, so the button always works. And we're going to require confirmation, which will pop up a modal. And in that modal, we will just add a URL. We'll make it a string input, and we'll make it full widths. And I think that's all we need to do here. So just to see what happens here, if I go back to the, to the company's collection, we now see this button here, this manual flow, trigger. I click that. It pulls up the box, and we'll put in a URL and hit run flow. So now if we go back to our flow, we should immediately see that there is one log. And in here, there is a body, and the URL is the value that we typed. Fantastic. Now we need to actually do something with it. So, let's go ahead and add a new operation here. And, honestly, fire crawl is pretty sick. You can just make one web request. Let's take a look at their docs. We're gonna use the LLM extract, endpoint here, and let's just take a look at the kind of construction of this API call. It's a post request to this URL. We're gonna pass in our, our API key here as an authorization header, and then they give us this kind of JSON payload here. Here, it's telling it to go ahead and extract specifically these four fields, the company mission, does it support SSO, is it open source, and is it in Y Combinator? And it's saying you must go get all four of these. So let's actually just turn this straight into a flow request, request URL, operation. So we're gonna do a post request to this URL, post request to this URL. I'm gonna go and copy my API key again here, and at the end of this I'll, I'll destroy the key. Authorization authorization, bearer API key, save, And then there's the request body. And, honestly, it contains a little more than we need, but this contains everything we need. So we'll just pop that in there. The only thing we wanna do, of course, is pass in the URL that we put in the box. So we'll replace this with trigger.body. URL. Fantastic. Let's save that and see what happens if we go over to content, press the button, and type in directors.io. We see that's running. That's running. That's a good sign. It means it's going off and making the request, waiting for the request. And then we see there is a second log. And we get some data back. There was a 200, so it was successful. Inside of data, there is a property called data, and then there is this value called extract. Extract contains all of those custom keys we asked for, company mission, supports SSO, is open source, and is in YC. And then always when you scrape, you get this metadata object, title, description, language, Open Graph data, source URL, and so on. So, really, all we wanna do here now is we wanna take this data and create a new company from it. So let's add a new, let's create a new operation on the resolve path of that web request. Let's call it create data. We're gonna create something in the company's collection. We'll give it full access, and then we just need to provide a payload. So let's go ahead and do that. We have an object here. We have a name. Oh, we have a name, and we're gonna pass in the value of the last operation dot data.data.metadata dot title. Then I, for one, am I'm just gonna copy this and edit it each time. So we have name, URL. So that's last .data.data.metadata. Source URL. We could, of course, just take it from the trigger body URL, but this is properly formatted. You'll notice I typed in directus dot I o, but when it came back in the payload, it came back with the with the, with the protocol HTTPS and so on. So we have name. We have URL. We have a description. Now this one is also from the metadata description. We have the mission. Now this was a custom piece of data we asked to be extracted. Company mission is what we called it. And finally, we have open, open underscore source. Last data, data extract and then is underscore open underscore source and then remove that trailing comma. So I believe that's the name of all of the fields. We'll figure it out in a moment when when it inevitably doesn't work. We'll hit the button again, directors.io, and hit run flow. Once again, that's going off to fire crawl using their endpoint and there we see there we see it straight here. URL name, description, mission, and the boolean is open source. Let's, let's try that once more. Let's go in here and say firefirecrawl.devrun flow. So let's see. And in theory, we should just give that a moment, and there it is. So now you can go ahead and grab more data. Now, of course, if we take a look at this endpoint here, you can provide custom properties, and it will try its best to get data out from that. They have a couple of other interesting things which I'll draw your attention to even if I don't think it works in this context. They have extracting without a schema. So this extract here was us creating a a schema. Right? You can give it just a text. You can give it a prompt. Extract the company mission from the page. But the thing I don't like about that is you're not explicitly saying what the name of the key is, so you don't necessarily know what it's gonna be at the end. I like creating a schema personally. They do something else that's kinda interesting. If I take a look at where is I think it's in their API API reference here inside of scrape. They have this interesting thing called actions. So you can get it to wait, to take a screenshot, to click, write text, press a key, and scroll. And the combination of clicking and writing text means you can get it to interact with your web page. You see it here, actions, wait two milliseconds. You could get it to, like, sign into things perhaps, perform search terms. I think it's super interesting. And then take screenshots, of course, and upload those to directors if you fancy. So there's a lot of flexibility in this. Having seen kinda how easy this API is, I think I'll go ahead and turn this into, an extension some point in the next few weeks, which we can release as part of Directus AI. But, yeah, that's how to integrate FireCrawl with Directus using Directus Automate. Hope you found this interesting, and by all means, if you have questions, just reach out.","9c575c49-4fb1-4eed-9731-e1e1acb55da3",[316],"4830ae03-22cd-4399-82f2-5767bf5cf0cc",[],{"id":134,"number":135,"show":122,"year":136,"episodes":319},[138],{"reps":321},[322,378],{"name":323,"sdr":8,"link":324,"countries":325,"states":327},"John Daniels","https://meet.directus.io/meetings/john2144/john-contact-form-meeting",[326],"United States",[328,329,330,331,332,333,334,335,336,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,360,361,362,363,364,365,366,367,368,369,370,371,372,373,374,375,376,377],"Michigan","Indiana","Ohio","West Virginia","Kentucky","Virginia","Tennessee","North Carolina","South Carolina","Georgia","Florida","Alabama","Mississippi","New York","MI","IN","OH","WV","KY","VA","TN","NC","SC","GA","FL","AL","MS","NY","Connecticut","CT","Delaware","DE","Maine","ME","Maryland","MD","Massachusetts","MA","New Hampshire","NH","New Jersey","NJ","Pennsylvania","PA","Rhode Island","RI","Vermont","VT","Washington DC","DC",{"name":379,"link":380,"countries":381},"Michelle Riber","https://meetings.hubspot.com/mriber",[382,383,384,385,386,387,388,389,390,391,392,393,394,395,396,397,398,399,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,419,420,421,422,423,424,425,426,427,428,429,430,431,432,433,434,435,436,437,438,439,440,441,442,443,444,445,446,447,448,449,450,451,452,453,454,455,456,457,458,459,460,461,462,463,464,465,466,467,468,469,470,471,472,473,474,475,476,477,478,479,480,481,482,483,484,485,486,487,488,489,490,491,492,493,494,495,496,497,498,499,500,501,502,503,504,505,506,507,508,509,510,511,512,513,514,515,516,517,518,519,520,521,522,523,524,525,526,527,528,529,530,531,532,533,534,535,536,537,538,539,540,541,542,543,544,545,546,547,548,549,550,551,552,553,554,555,556,557,558,559,560,561,562,563,564,565,566,567,568,569,359,570,571],"Albania","ALB","Algeria","DZA","Andorra","AND","Angola","AGO","Austria","AUT","Belgium","BEL","Benin","BEN","Bosnia and Herzegovina","BIH","Botswana","BWA","Bulgaria","BGR","Burkina Faso","BFA","Burundi","BDI","Cameroon","CMR","Cape Verde","CPV","Central African Republic","CAF","Chad","TCD","Comoros","COM","Côte d'Ivoire","CIV","Croatia","HRV","Czech Republic","CZE","Democratic Republic of Congo","COD","Denmark","DNK","Djibouti","DJI","Egypt","EGY","Equatorial Guinea","GNQ","Eritrea","ERI","Estonia","EST","Eswatini","SWZ","Ethiopia","ETH","Finland","FIN","France","FRA","Gabon","GAB","Gambia","GMB","Ghana","GHA","Greece","GRC","Guinea","GIN","Guinea-Bissau","GNB","Hungary","HUN","Iceland","ISL","Ireland","IRL","Italy","ITA","Kenya","KEN","Latvia","LVA","Lesotho","LSO","Liberia","LBR","Libya","LBY","Liechtenstein","LIE","Lithuania","LTU","Luxembourg","LUX","Madagascar","MDG","Malawi","MWI","Mali","MLI","Malta","MLT","Mauritania","MRT","Mauritius","MUS","Moldova","MDA","Monaco","MCO","Montenegro","MNE","Morocco","MAR","Mozambique","MOZ","Namibia","NAM","Niger","NER","Nigeria","NGA","North Macedonia","MKD","Norway","NOR","Poland","POL","Portugal","PRT","Republic of Congo","COG","Romania","ROU","Rwanda","RWA","San Marino","SMR","São Tomé and Príncipe","STP","Senegal","SEN","Serbia","SRB","Seychelles","SYC","Sierra Leone","SLE","Slovakia","SVK","Slovenia","SVN","Somalia","SOM","South Africa","ZAF","South Sudan","SSD","Spain","ESP","Sudan","SDN","Sweden","SWE","Tanzania","TZA","Togo","TGO","Tunisia","TUN","Uganda","UGA","United Kingdom","GBR","Vatican City","VAT","Zambia","ZMB","Zimbabwe","ZWE","UK","Germany","Netherlands","Switzerland","CH","NL",1773850418715]