App Extension (Action Extension) doesn't open - JavaScript

For some reason I don't understand, the Action Extension button (in the Share menu) doesn't respond. At this point, the Action extension grabs the URL from Safari (where it was launched from) in order to do some work with it afterwards. The layer between the web page and the extension is a JS file (maybe something is wrong there, I just copied it).
ViewController:
class ActionViewController: UIViewController {
    var SafariURL: NSURL!

    override func viewDidLoad() {
        super.viewDidLoad()
        let extensionItem = extensionContext?.inputItems.first as? NSExtensionItem
        let itemProvider = extensionItem!.attachments?.first as? NSItemProvider
        let propertyList = String(kUTTypePropertyList)
        if itemProvider!.hasItemConformingToTypeIdentifier(propertyList) {
            print("I'm here2")
            itemProvider!.loadItem(forTypeIdentifier: propertyList, options: nil, completionHandler: { (item, error) -> Void in
                let dictionary = item as? NSDictionary
                OperationQueue.main.addOperation {
                    let results = dictionary![NSExtensionJavaScriptPreprocessingResultsKey] as? NSDictionary
                    let urlString = results!["currentUrl"] as? String
                    self.SafariURL = NSURL(string: urlString!)
                }
            })
        } else {
            print("error")
        }
    }

    @IBAction func done() {
        // Return any edited content to the host app.
        // This template doesn't do anything, so we just echo the passed-in items.
        self.extensionContext!.completeRequest(returningItems: self.extensionContext!.inputItems, completionHandler: nil)
    }
}
JS File:
var GetURL = function() {};

GetURL.prototype = {
    run: function(arguments) {
        arguments.completionFunction({ "currentUrl": document.URL });
    },
    finalize: function(arguments) {
        var message = arguments["statusMessage"];
        if (message) {
            alert(message);
        }
    }
};

var ExtensionPreprocessingJS = new GetURL;
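One thing worth checking when the extension never fires: the JS preprocessing file only runs if it is wired up in the extension's Info.plist. Under NSExtensionAttributes there must be an NSExtensionJavaScriptPreprocessingFile entry naming the file (without the .js suffix), and the NSExtensionActivationRule must allow web pages (for example, NSExtensionActivationSupportsWebPageWithMaxCount set to 1); otherwise the Share-sheet button silently does nothing.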

Finally, you should change the contents of override func viewDidLoad to:

super.viewDidLoad()

if let inputItem = extensionContext?.inputItems.first as? NSExtensionItem {
    if let itemProvider = inputItem.attachments?.first {
        itemProvider.loadItem(forTypeIdentifier: kUTTypePropertyList as String) { [self] (dict, error) in
            guard let itemDictionary = dict as? NSDictionary else { return }
            guard let javaScriptValues = itemDictionary[NSExtensionJavaScriptPreprocessingResultsKey] as? NSDictionary else { return }
            // NB: this key must match what the JS completionFunction sends
            // ("currentUrl" in the JS file above, not "URL").
            self.Pageurl = javaScriptValues["URL"] as? String ?? ""
        }
    }
}

The JS is OK!

Related

How can I return the result of JS (or any function) to the iPhone screen via ContentView?

// ContentView.swift
// Shared

import Foundation
import SwiftUI
import JavaScriptCore

class cube {
    var result: String
    func do_js(text: String) -> String {
        let jsSource = "var testFunct = function(message) { return \"Test Message: \" + message;}"
        var context = JSContext()
        context?.evaluateScript(jsSource)
        let testFunction = context?.objectForKeyedSubscript("testFunct")
        var result = testFunction?.call(withArguments: [text]).toString()
        return result!
    }
}

struct ContentView: View {
    cube().do_js(text: "Hello world") // Starts from here
    var show_text = lol().result
    var body: some View {
        Text(show_text)
            .font(.body)
            .fontWeight(.black)
            .foregroundColor(Color.red)
            .padding()
    }
}

struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
(Sorry, I'm a beginner and came to Swift not even from JS but from Python, so this is all incredibly new to me. Still, coming from Python, JS is the more understandable of the two for me.)
Here's the simplest version. It basically uses your original pattern, where doJS returns a value. The disadvantage of this version is that doJS will get called every time the view renders.
class Cube {
    func doJS(text: String) -> String? {
        let jsSource = "var testFunct = function(message) { return \"Test Message: \" + message;}"
        let context = JSContext()
        context?.evaluateScript(jsSource)
        let testFunction = context?.objectForKeyedSubscript("testFunct")
        return testFunction?.call(withArguments: [text]).toString()
    }
}

struct ContentView: View {
    var body: some View {
        Text(Cube().doJS(text: "Hello, world!") ?? "No result")
            .font(.body)
            .fontWeight(.black)
            .foregroundColor(Color.red)
            .padding()
    }
}
And here's a slightly different version. In this version, Cube is an ObservableObject with a @Published value that stores the result. It will only get called once, in onAppear.
class Cube: ObservableObject {
    @Published var result: String?

    func doJS(text: String) {
        let jsSource = "var testFunct = function(message) { return \"Test Message: \" + message;}"
        let context = JSContext()
        context?.evaluateScript(jsSource)
        let testFunction = context?.objectForKeyedSubscript("testFunct")
        result = testFunction?.call(withArguments: [text]).toString()
    }
}

struct ContentView: View {
    @StateObject var cube = Cube()

    var body: some View {
        Text(cube.result ?? "No result")
            .font(.body)
            .fontWeight(.black)
            .foregroundColor(Color.red)
            .padding()
            .onAppear {
                cube.doJS(text: "Hello, world!")
            }
    }
}
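One debugging aid worth knowing in either version (not part of the answer above, just a suggestion): JSContext swallows JavaScript exceptions by default, so a broken script simply yields nil. Installing an exceptionHandler makes such failures visible:

import JavaScriptCore

let context = JSContext()
// Log any JavaScript exception instead of failing silently.
context?.exceptionHandler = { _, exception in
    print("JS error:", exception?.toString() ?? "unknown")
}
context?.evaluateScript("this.will.throw")   // the handler prints the error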

How to use UIImagePickerController from a webview

I'm following this tutorial online (with a bit of a twist): https://makeapppie.com/2016/06/28/how-to-use-uiimagepickercontroller-for-a-camera-and-photo-library-in-swift-3-0/. I'm trying to call my UIImagePickerController from a webview, and I'm not sure how to change the code to get it to work properly. The difference is that I'll be receiving a call from JavaScript and invoking the picker as a result, instead of from a UIButton. Then I want to send the image back as a Base64 string using my JavaScript interface.
Here is what I have so far.
import UIKit
import WebKit

class ViewController: UIViewController,
    WKScriptMessageHandler,
    UIImagePickerControllerDelegate,
    UINavigationControllerDelegate {

    var webView: WKWebView?
    let userContentController = WKUserContentController()
    let picker = UIImagePickerController()

    @IBAction func photoFromLibrary(_ sender: UIBarButtonItem) {
        picker.allowsEditing = false
        picker.sourceType = .photoLibrary
        picker.mediaTypes = UIImagePickerController.availableMediaTypes(for: .photoLibrary)!
        present(picker, animated: true, completion: nil)
    }

    override func loadView() {
        super.loadView()
        let config = WKWebViewConfiguration()
        config.userContentController = userContentController
        self.webView = WKWebView(frame: self.view.bounds, configuration: config)
        userContentController.add(self, name: "iOS")
        let url = URL(string: "https://relate.lavishweb.com/account")
        let request = URLRequest(url: url!)
        _ = webView?.load(request)
        self.view = self.webView
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        picker.delegate = self
    }

    func userContentController(_ userContentController: WKUserContentController, didReceive message: WKScriptMessage) {
        webView?.evaluateJavaScript("window.settings.setImageBase64FromiOS()") { (result, error) in
            if error != nil {
                print("Success")
            } else {
                print("Failure")
            }
        }
        // now use the name and token as you see fit!
    }

    func imagePickerController(_ picker: UIImagePickerController,
                               didFinishPickingMediaWithInfo info: [String : AnyObject])
    {
        let chosenImage = info[UIImagePickerControllerOriginalImage] as! UIImage //2
        // myImageView.contentMode = .scaleAspectFit //3
        // myImageView.image = chosenImage //4
        // I want to do additional stuff here and send back as a Base64 string
        dismiss(animated: true, completion: nil) //5
    }

    func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
        dismiss(animated: true, completion: nil)
    }
}
Hi, I had the same problem and solved it, but it's a little dirty.
In viewDidLoad() you override the delegate that the WKWebView set; the WKWebView needs that delegate too. You have to save the old delegate locally.
var oldDelegate: UIImagePickerControllerDelegate?

override func viewDidLoad() {
    super.viewDidLoad()
    oldDelegate = picker.delegate // save the delegate from WKWebView
    picker.delegate = self
}
Now you can run your own code in your delegate. At the end of your imagePickerController() method you have to invoke imagePickerController() on the old delegate.
func imagePickerController(_ picker: UIImagePickerController,
                           didFinishPickingMediaWithInfo info: [String : AnyObject]) {
    var myinfo = info
    let chosenImage = info[UIImagePickerControllerOriginalImage] as! UIImage //2
    // myImageView.contentMode = .scaleAspectFit //3
    // myImageView.image = chosenImage //4
    myinfo[UIImagePickerControllerOriginalImage] = chosenImage
    myinfo[UIImagePickerControllerImageURL] = nil
    oldDelegate?.imagePickerController!(picker, didFinishPickingMediaWithInfo: myinfo)
}
I set the URL to nil because the WKWebView loads the image directly from disk if the URL is filled in.
myinfo[UIImagePickerControllerImageURL] = nil
I hope this is helpful.
Okay, I figured it out.
I didn't know what IBAction was, but basically it marks a function as one that is called by an Interface Builder element, such as a UIButton. After realizing this, I just changed the function to
func photoFromLibrary() {
    picker.allowsEditing = false
    picker.sourceType = .photoLibrary
    picker.mediaTypes = UIImagePickerController.availableMediaTypes(for: .photoLibrary)!
    present(picker, animated: true, completion: nil)
}
Then, in my JavaScript interface, I simply called the function when I received the call from JavaScript.
func userContentController(_ userContentController: WKUserContentController, didReceive message: WKScriptMessage) {
    webView?.evaluateJavaScript("window.settings.setImageBase64FromiOS()") { (result, error) in
        if error != nil {
            print("failure")
        } else {
            self.photoFromLibrary()
        }
    }
}
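For reference, the page reaches this handler through the message handler registered by userContentController.add(self, name: "iOS"); on the JavaScript side the call is window.webkit.messageHandlers.iOS.postMessage(...), and whatever is posted arrives as the message argument above.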
Stay tuned and I will post how to encode an image to a Base64 string while also shrinking it at load time (once I find out how to do that, of course).
EDIT: I figured out how to resize the image and convert it to a Base64 string very quickly.
I implemented this extension...
extension UIImage {
    func resized(withPercentage percentage: CGFloat) -> UIImage? {
        let canvasSize = CGSize(width: size.width * percentage, height: size.height * percentage)
        UIGraphicsBeginImageContextWithOptions(canvasSize, false, scale)
        defer { UIGraphicsEndImageContext() }
        draw(in: CGRect(origin: .zero, size: canvasSize))
        return UIGraphicsGetImageFromCurrentImageContext()
    }

    func resized(toWidth width: CGFloat) -> UIImage? {
        let canvasSize = CGSize(width: width, height: CGFloat(ceil(width/size.width * size.height)))
        UIGraphicsBeginImageContextWithOptions(canvasSize, false, scale)
        defer { UIGraphicsEndImageContext() }
        draw(in: CGRect(origin: .zero, size: canvasSize))
        return UIGraphicsGetImageFromCurrentImageContext()
    }
}
and then I returned the string like so...
func imagePickerController(_ picker: UIImagePickerController,
                           didFinishPickingMediaWithInfo info: [String : AnyObject])
{
    let chosenImage = info[UIImagePickerControllerOriginalImage] as! UIImage
    let thumb = chosenImage.resized(toWidth: 72.0)
    let imageData: NSData = UIImagePNGRepresentation(thumb!)! as NSData
    let dataImage = imageData.base64EncodedString(options: .lineLength64Characters)
    print(dataImage)
    dismiss(animated: true, completion: nil) //5
}
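To close the loop from the original question (sending the string back through the JavaScript interface), a minimal sketch of the last step, assuming window.settings.setImageBase64FromiOS is the page's own handler and accepts one string argument: replace the print(dataImage) above with a call back into the page. Note that the .lineLength64Characters option inserts line breaks, which would break a JS string literal, so encode without options when embedding:

// Hypothetical follow-up: hand the Base64 string back to the page.
// window.settings.setImageBase64FromiOS is the page's own JS function from the question, not a WebKit API.
let dataImage = imageData.base64EncodedString(options: [])   // no line breaks, safe inside a JS literal
webView?.evaluateJavaScript("window.settings.setImageBase64FromiOS('\(dataImage)')") { _, error in
    if let error = error {
        print("JS callback failed: \(error)")
    }
}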

How to call a Swift function from a JavaScript web view

I am launching a web view from my Swift app. From the web view I want to call a Swift function on a button click. I found the code sample here.
When I press the button, nothing happens. The URL I am loading in the web view is https://test-swift-js.github.io
Here is the code:
import Cocoa
import WebKit

class ViewController: NSViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    override var representedObject: Any? {
        didSet {
            // Update the view, if already loaded.
        }
    }

    var webView: WebView!

    @IBAction func mybutton(_ sender: Any) {
        let url = URL(string: "https://test-swift-js.github.io")
        let request = URLRequest(url: url!)
        webView = WebView(frame: NSRect(x: 0, y: 0, width: 1000, height: 1000))
        webView.mainFrame.load(request)
        self.view.addSubview(webView)
    }

    func webView(webView: WebView!, didClearWindowObject windowObject: WebScriptObject!, forFrame frame: WebFrame!) {
        windowObject.setValue(self, forKey: "interOp")
    }

    // The name used to represent the Swift function name in Javascript.
    class func webScriptNameForSelector(aSelector: Selector) -> String! {
        if aSelector == "callSwift:" {
            return "callSwift"
        } else {
            return nil
        }
    }

    class func isSelectorExcludedFromWebScript(aSelector: Selector) -> Bool {
        return false
    }

    func callSwift(message: String) {
        let alert = NSAlert()
        alert.messageText = message
        alert.alertStyle = NSAlertStyle.informational
        alert.beginSheetModal(for: self.view.window!, completionHandler: nil)
    }
}
Maybe you forgot to set the delegate.
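For the legacy WebView API in the question, that would mean assigning the frame-load delegate before loading the request; otherwise webView(_:didClearWindowObject:forFrame:) is never called, so "interOp" never reaches the page. A minimal sketch (the view controller would also need to act as a WebFrameLoadDelegate):

webView = WebView(frame: NSRect(x: 0, y: 0, width: 1000, height: 1000))
webView.frameLoadDelegate = self // without this, didClearWindowObject never fires
webView.mainFrame.load(request)
self.view.addSubview(webView)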

Use JavaScript data in a tableView (Swift)

Hello, I'm new to Swift programming and have the following issue. I read about how to get data from Safari into my Action extension using code inside viewDidLoad. So far so good.
I built a tableView following a tutorial and it works fine.
Now I would like to use the data I extracted from Safari in my tableView. The problem is that my tableView is loaded first, and only after that is my data pulled from Safari.
How can I use that data directly in my tableView?
Here is my code. Be aware that I am new at this and it is under construction. The tableView is currently filled with the sample content from the tutorial.
import UIKit
import MobileCoreServices

var webDataArray: [String] = []

class ActionViewController: UIViewController, UITableViewDataSource, UITableViewDelegate {

    @IBOutlet weak var imageView: UIImageView!
    @IBOutlet weak var myTableView: UITableView!

    var arrayOfWebData: [CustomCellContents] = [CustomCellContents]()
    var effectStannd = false

    override func viewDidLoad() {
        super.viewDidLoad()
        for item: AnyObject in self.extensionContext!.inputItems {
            let inputItem = item as! NSExtensionItem
            for provider: AnyObject in inputItem.attachments! {
                let itemProvider = provider as! NSItemProvider
                if itemProvider.hasItemConformingToTypeIdentifier(kUTTypePropertyList as! String) {
                    itemProvider.loadItemForTypeIdentifier(kUTTypePropertyList as! String, options: nil, completionHandler: { (list, error) in
                        if let results = list as? NSDictionary {
                            NSOperationQueue.mainQueue().addOperationWithBlock {
                                var webData = results.description
                                webDataArray = split(webData) { $0 == "," }
                                var testArray = split(webDataArray[0]) { $0 == " " }
                                webDataArray.removeAtIndex(0)
                                webDataArray.append(testArray[7])
                                testArray = split(webDataArray[13]) { $0 == " " }
                                webDataArray.removeAtIndex(13)
                                webDataArray.append(testArray[1])
                                //println(webDataArray)
                                println(webDataArray.count)
                            }
                        }
                    })
                }
            }
        }
        println(webDataArray.count)
        self.setUpWebData()
        self.addEffect()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    @IBAction func done() {
        // Return any edited content to the host app.
        // This template doesn't do anything, so we just echo the passed-in items.
        self.extensionContext!.completeRequestReturningItems(self.extensionContext!.inputItems, completionHandler: nil)
    }

    func setUpWebData() {
        var webData1 = CustomCellContents(fileName: "Torrent", fileKind: "img1.jpeg")
        var webData2 = CustomCellContents(fileName: "Magnet", fileKind: "img2.jpeg")
        var webData3 = CustomCellContents(fileName: "Exe", fileKind: "img1.jpeg")
        var webData4 = CustomCellContents(fileName: "DMG", fileKind: "img2.jpeg")
        arrayOfWebData.append(webData1)
        arrayOfWebData.append(webData2)
        arrayOfWebData.append(webData3)
        arrayOfWebData.append(webData4)
    }

    func tableView(tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return arrayOfWebData.count
    }

    func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell {
        let cell: CustomCell = tableView.dequeueReusableCellWithIdentifier("Cell") as! CustomCell
        let customCellContents = arrayOfWebData[indexPath.row]
        cell.setCell(customCellContents.fileName, imageName: customCellContents.fileKind)
        return cell
    }

    /////// Custom swipe from the right
    func tableView(tableView: UITableView, commitEditingStyle editingStyle: UITableViewCellEditingStyle, forRowAtIndexPath indexPath: NSIndexPath) {
    }

    func tableView(tableView: UITableView, editActionsForRowAtIndexPath indexPath: NSIndexPath) -> [AnyObject]? {
        var shareAction = UITableViewRowAction(style: .Normal, title: "Download") { (action: UITableViewRowAction!, indexPath: NSIndexPath!) -> Void in
            let firstActivityItem = self.arrayOfWebData[indexPath.row]
            let activityViewControler = UIActivityViewController(activityItems: [firstActivityItem], applicationActivities: nil)
            self.presentViewController(activityViewControler, animated: true, completion: nil)
        }
        shareAction.backgroundColor = UIColor.blueColor()
        return [shareAction]
    }

    // the following function makes sure the row is automatically deselected
    func tableView(tableView: UITableView, didSelectRowAtIndexPath indexPath: NSIndexPath) {
        self.myTableView.deselectRowAtIndexPath(indexPath, animated: true)
    }

    func addEffect() {
        if effectStannd {
            var effect = UIBlurEffect(style: UIBlurEffectStyle.Light)
            var effectView = UIVisualEffectView(effect: effect)
            effectView.frame = CGRectMake(0, 0, 320, 600)
            view.addSubview(effectView)
            effectStannd = false
        }
    }
}
I found the solution: refresh my tableView after loading. It works fine like this.
func didRefreshList() {
    var tableView = myTableView
    self.tableData = webDataArray3
    tableView.reloadData()
    self.refreshControl.endRefreshing()
}
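The underlying point is simply that the table has to be reloaded on the main queue once the Safari data arrives, i.e. from inside the loadItem completion handler. A minimal sketch in current Swift (the parsing helper is hypothetical, standing in for the string-splitting above):

itemProvider.loadItem(forTypeIdentifier: kUTTypePropertyList as String, options: nil) { item, error in
    guard let results = item as? NSDictionary else { return }
    OperationQueue.main.addOperation {
        // Update the data source first, then reload; UIKit must only be touched on the main queue.
        self.arrayOfWebData = self.makeCellContents(from: results) // hypothetical parsing helper
        self.myTableView.reloadData()
    }
}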

Passing a sound (wav) file to JavaScript from Objective-C

I am recording a sound file (wav format) in Objective-C. I want to pass it back to JavaScript using Objective-C's stringByEvaluatingJavaScriptFromString. I am thinking I will have to convert the wav file to a Base64 string to pass to this function, and then convert the Base64 string back to wav/blob format in JavaScript to feed to an audio tag to play it. I don't know how to do that, and I'm not sure it's the best way to pass a wav file back to JavaScript. Any ideas will be appreciated.
Well, this was not as straightforward as I expected, so here is how I was able to achieve it.
Step 1: I recorded the audio in caf format using AVAudioRecorder.
NSArray *dirPaths;
NSString *docsDir;

dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
docsDir = [dirPaths objectAtIndex:0];
soundFilePath = [docsDir stringByAppendingPathComponent:@"sound.caf"];
NSURL *soundFileURL = [NSURL fileURLWithPath:soundFilePath];

NSDictionary *recordSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                [NSNumber numberWithInt:AVAudioQualityMin],
                                AVEncoderAudioQualityKey,
                                [NSNumber numberWithInt:16],
                                AVEncoderBitRateKey,
                                [NSNumber numberWithInt:2],
                                AVNumberOfChannelsKey,
                                [NSNumber numberWithFloat:44100],
                                AVSampleRateKey,
                                nil];

NSError *error = nil;
audioRecorder = [[AVAudioRecorder alloc]
                 initWithURL:soundFileURL
                 settings:recordSettings error:&error];

if (error)
{
    NSLog(@"error: %@", [error localizedDescription]);
} else {
    [audioRecorder prepareToRecord];
}
After this, you just need to call [audioRecorder record] to record the audio; it will be recorded in caf format. If you want to see my recordAudio function, here it is.
- (void)recordAudio
{
    if (!audioRecorder.recording)
    {
        _playButton.enabled = NO;
        _recordButton.title = @"Stop";
        [audioRecorder record];
        [self animate1:nil finished:nil context:nil];
    }
    else
    {
        [_recordingImage stopAnimating];
        [audioRecorder stop];
        _playButton.enabled = YES;
        _recordButton.title = @"Record";
    }
}
Step 2: Convert the caf format to wav format. I was able to do this using the following function.
- (BOOL)exportAssetAsWaveFormat:(NSString *)filePath
{
    NSError *error = nil;

    NSDictionary *audioSetting = [NSDictionary dictionaryWithObjectsAndKeys:
                                  [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                  [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
                                  [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
                                  [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
                                  [NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
                                  [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
                                  [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
                                  [NSData data], AVChannelLayoutKey, nil];

    NSString *audioFilePath = filePath;
    AVURLAsset *URLAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:audioFilePath] options:nil];
    if (!URLAsset) return NO;

    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:URLAsset error:&error];
    if (error) return NO;

    NSArray *tracks = [URLAsset tracksWithMediaType:AVMediaTypeAudio];
    if (![tracks count]) return NO;

    AVAssetReaderAudioMixOutput *audioMixOutput = [AVAssetReaderAudioMixOutput
                                                   assetReaderAudioMixOutputWithAudioTracks:tracks
                                                   audioSettings:audioSetting];
    if (![assetReader canAddOutput:audioMixOutput]) return NO;
    [assetReader addOutput:audioMixOutput];
    if (![assetReader startReading]) return NO;

    NSString *title = @"WavConverted";
    NSArray *docDirs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *docDir = [docDirs objectAtIndex:0];
    NSString *outPath = [[docDir stringByAppendingPathComponent:title]
                         stringByAppendingPathExtension:@"wav"];

    // NB: this bails out when there is no old file to remove (e.g. on the first run);
    // you may want to check for existence first instead.
    if (![[NSFileManager defaultManager] removeItemAtPath:outPath error:NULL])
    {
        return NO;
    }

    soundFilePath = outPath;
    NSURL *outURL = [NSURL fileURLWithPath:outPath];
    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:outURL
                                                          fileType:AVFileTypeWAVE
                                                             error:&error];
    if (error) return NO;

    AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                              outputSettings:audioSetting];
    assetWriterInput.expectsMediaDataInRealTime = NO;
    if (![assetWriter canAddInput:assetWriterInput]) return NO;
    [assetWriter addInput:assetWriterInput];
    if (![assetWriter startWriting]) return NO;

    //[assetReader retain];
    //[assetWriter retain];

    [assetWriter startSessionAtSourceTime:kCMTimeZero];

    dispatch_queue_t queue = dispatch_queue_create("assetWriterQueue", NULL);

    [assetWriterInput requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
        NSLog(@"start");
        while (1)
        {
            if ([assetWriterInput isReadyForMoreMediaData] && (assetReader.status == AVAssetReaderStatusReading)) {
                CMSampleBufferRef sampleBuffer = [audioMixOutput copyNextSampleBuffer];
                if (sampleBuffer) {
                    [assetWriterInput appendSampleBuffer:sampleBuffer];
                    CFRelease(sampleBuffer);
                } else {
                    [assetWriterInput markAsFinished];
                    break;
                }
            }
        }
        [assetWriter finishWriting];
        //[self playWavFile];

        NSError *err;
        NSData *audioData = [NSData dataWithContentsOfFile:soundFilePath options:0 error:&err];
        [self.audioDelegate doneRecording:audioData];
        //[assetReader release];
        //[assetWriter release];
        NSLog(@"soundFilePath=%@", soundFilePath);
        NSDictionary *dict = [[NSFileManager defaultManager] attributesOfItemAtPath:soundFilePath error:&err];
        NSLog(@"size of wav file = %@", [dict objectForKey:NSFileSize]);
        //NSLog(@"finish");
    }];

    return YES;
}
In this function, I call the audioDelegate function doneRecording with audioData, which is in wav format. Here is the code for doneRecording.
- (void)doneRecording:(NSData *)contents
{
    myContents = [[NSData dataWithData:contents] retain];
    [self returnResult:alertCallbackId args:@"Recording Done.", nil];
}

// Call this function when you have results to send back to javascript callbacks
// callbackId : int comes from handleCall function
// args: list of objects to send to the javascript callback
- (void)returnResult:(int)callbackId args:(id)arg, ...
{
    if (callbackId == 0) return;

    va_list argsList;
    NSMutableArray *resultArray = [[NSMutableArray alloc] init];

    if (arg != nil) {
        [resultArray addObject:arg];
        va_start(argsList, arg);
        while ((arg = va_arg(argsList, id)) != nil)
            [resultArray addObject:arg];
        va_end(argsList);
    }

    NSString *resultArrayString = [json stringWithObject:resultArray allowScalar:YES error:nil];
    [self performSelectorOnMainThread:@selector(stringByEvaluatingJavaScriptFromString:)
                           withObject:[NSString stringWithFormat:@"NativeBridge.resultForCallback(%d,%@);", callbackId, resultArrayString]
                        waitUntilDone:NO];
    [resultArray release];
}
Step 3: Now it is time to communicate back to the JavaScript inside the UIWebView that we are done recording the audio, so it can start accepting the data in blocks from us. I am using websockets to transfer the data back to JavaScript. The data is transferred in blocks because the server I was using (https://github.com/benlodotcom/BLWebSocketsServer) was built on libwebsockets (http://git.warmcat.com/cgi-bin/cgit/libwebsockets/).
This is how you start the server in the delegate class.
- (id)initWithFrame:(CGRect)frame
{
    if (self = [super initWithFrame:frame]) {
        [self _createServer];
        [self.server start];
        myContents = [NSData data];
        // Set delegate in order for "shouldStartLoadWithRequest" to be called
        self.delegate = self;
        // Set non-opaque in order to make "body{background-color:transparent}" work!
        self.opaque = NO;
        // Instantiate the JSON parser library
        json = [SBJSON new];
        // load our html file
        NSString *path = [[NSBundle mainBundle] pathForResource:@"webview-document" ofType:@"html"];
        [self loadRequest:[NSURLRequest requestWithURL:[NSURL fileURLWithPath:path]]];
    }
    return self;
}
- (void)_createServer
{
    /* Create a simple echo server */
    self.server = [[BLWebSocketsServer alloc] initWithPort:9000 andProtocolName:echoProtocol];
    [self.server setHandleRequestBlock:^NSData *(NSData *data) {
        NSString *convertedString = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
        NSLog(@"Received Request...%@", convertedString);
        if ([convertedString isEqualToString:@"start"])
        {
            NSLog(@"myContents size: %d", [myContents length]);
            int contentSize = [myContents length];
            int chunkSize = 64 * 1023;
            chunksCount = ([myContents length] / (64 * 1023)) + 1;
            NSLog(@"ChunkSize=%d", chunkSize);
            NSLog(@"chunksCount=%d", chunksCount);
            chunksArray = [[NSMutableArray array] retain];
            int index = 0;
            //NSRange chunkRange;
            for (int i = 1; i <= chunksCount; i++)
            {
                if (i == chunksCount)
                {
                    NSRange chunkRange = {index, contentSize - index};
                    NSLog(@"chunk# = %d, chunkRange=(%d,%d)", i, index, contentSize - index);
                    NSData *dataChunk = [myContents subdataWithRange:chunkRange];
                    [chunksArray addObject:dataChunk];
                    break;
                }
                else
                {
                    NSRange chunkRange = {index, chunkSize};
                    NSLog(@"chunk# = %d, chunkRange=(%d,%d)", i, index, chunkSize);
                    NSData *dataChunk = [myContents subdataWithRange:chunkRange];
                    index += chunkSize;
                    [chunksArray addObject:dataChunk];
                }
            }
            return [chunksArray objectAtIndex:0];
        }
        else
        {
            int chunkNumber = [convertedString intValue];
            if (chunkNumber > 0 && (chunkNumber + 1) <= chunksCount)
            {
                return [chunksArray objectAtIndex:(chunkNumber)];
            }
        }
        NSLog(@"Releasing Array");
        [chunksArray release];
        chunksCount = 0;
        return [NSData dataWithBase64EncodedString:@"Stop"];
    }];
}
The code on the JavaScript side is:
var socket;
var chunkCount = 0;
var soundBlob, soundUrl;
var smallBlobs = new Array();

function captureMovieCallback(response)
{
    if (socket)
    {
        try {
            socket.send('start');
        }
        catch (e)
        {
            log('Socket is not valid object');
        }
    }
    else
    {
        log('socket is null');
    }
}

function closeSocket(response)
{
    socket.close();
}

function connect() {
    try {
        window.WebSocket = window.WebSocket || window.MozWebSocket;
        socket = new WebSocket('ws://127.0.0.1:9000', 'echo-protocol');

        socket.onopen = function() {
        }

        socket.onmessage = function(e) {
            var data = e.data;
            if (e.data instanceof ArrayBuffer)
            {
                log('its arrayBuffer');
            }
            else if (e.data instanceof Blob)
            {
                if (soundBlob)
                    log('its Blob of size = ' + e.data.size + ' final blob size:' + soundBlob.size);

                if (e.data.size != 3)
                {
                    //log('its Blob of size = ' + e.data.size);
                    smallBlobs[chunkCount] = e.data;
                    chunkCount = chunkCount + 1;
                    socket.send('' + chunkCount);
                }
                else
                {
                    //alert('End Received');
                    try {
                        soundBlob = new Blob(smallBlobs, { "type": "audio/wav" });
                        var myURL = window.URL || window.webkitURL;
                        soundUrl = myURL.createObjectURL(soundBlob);
                        log('soundURL=' + soundUrl);
                    }
                    catch (e)
                    {
                        log('Problem creating blob and url.');
                    }
                    try {
                        var serverUrl = 'http://10.44.45.74:8080/MyTestProject/WebRecording?record';
                        var xhr = new XMLHttpRequest();
                        xhr.open('POST', serverUrl, true);
                        xhr.setRequestHeader("content-type", "multipart/form-data");
                        xhr.send(soundBlob);
                    }
                    catch (e)
                    {
                        log('error uploading blob file');
                    }
                    socket.close();
                }
                //alert(JSON.stringify(msg, null, 4));
            }
            else
            {
                log('dont know');
            }
        }

        socket.onclose = function() {
            //message('<p class="event">Socket Status: ' + socket.readyState + ' (Closed)');
            log('final blob size:' + soundBlob.size);
        }
    } catch (exception) {
        log('<p>Error: ' + exception);
    }
}

function log(msg) {
    NativeBridge.log(msg);
}

function stopCapture() {
    NativeBridge.call("stopMovie", null, null);
}

function startCapture() {
    NativeBridge.call("captureMovie", null, captureMovieCallback);
}
NativeBridge.js
var NativeBridge = {
    callbacksCount: 1,
    callbacks: {},

    // Automatically called by native layer when a result is available
    resultForCallback: function resultForCallback(callbackId, resultArray) {
        try {
            var callback = NativeBridge.callbacks[callbackId];
            if (!callback) return;
            console.log("calling callback for " + callbackId);
            callback.apply(null, resultArray);
        } catch (e) { alert(e) }
    },

    // Use this in javascript to request native objective-c code
    // functionName : string (I think the name is explicit :p)
    // args : array of arguments
    // callback : function with n-arguments that is going to be called when the native code returned
    call: function call(functionName, args, callback) {
        //alert("call");
        //alert('callback=' + callback);
        var hasCallback = callback && typeof callback == "function";
        var callbackId = hasCallback ? NativeBridge.callbacksCount++ : 0;

        if (hasCallback)
            NativeBridge.callbacks[callbackId] = callback;

        var iframe = document.createElement("IFRAME");
        iframe.setAttribute("src", "js-frame:" + functionName + ":" + callbackId + ":" + encodeURIComponent(JSON.stringify(args)));
        document.documentElement.appendChild(iframe);
        iframe.parentNode.removeChild(iframe);
        iframe = null;
    },

    log: function log(message) {
        var iframe = document.createElement("IFRAME");
        iframe.setAttribute("src", "ios-log:" + encodeURIComponent(JSON.stringify("#iOS#" + message)));
        document.documentElement.appendChild(iframe);
        iframe.parentNode.removeChild(iframe);
        iframe = null;
    }
};
We call connect() on the JavaScript side on body load in the HTML.
Once we receive the callback (captureMovieCallback) from the startCapture function, we send a start message indicating that we are ready to accept the data.
The server on the Objective-C side splits the wav audio data into small chunks of chunkSize = 64*1023 bytes and stores them in an array.
It sends the first block back to the JavaScript side.
JavaScript accepts this block and sends back the number of the next block it needs from the server.
The server sends the block indicated by that number. This process is repeated until we send the last block to JavaScript.
At the end we send a stop message back to the JavaScript side indicating that we are done. It is 3 bytes in size (the string "Stop" decoded as Base64 yields 3 bytes), which is what the JavaScript uses as the criterion to break out of the loop.
Every block is stored as a small blob in an array. Then we create a bigger blob from these small blobs using the following line:
soundBlob = new Blob(smallBlobs, { "type" : "audio/wav" });
This blob is uploaded to a server, which writes the blob out as a wav file.
We can pass the URL of this wav file as the src of an audio tag to replay it on the JavaScript side.
We close the websocket connection after sending the blob to the server.
Hope this is clear enough to understand.
If all you want to do is play the sound, then you'd be much better off using one of the native audio playback systems in iOS rather than the HTML audio tag.
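For instance, a minimal sketch of that approach with AVAudioPlayer (shown in Swift for brevity, though the code in this question is Objective-C; the API is the same), assuming the wav data is already in memory:

import AVFoundation

// Keep a strong reference to the player, or playback stops immediately.
var player: AVAudioPlayer?

func play(wavData: Data) throws {
    player = try AVAudioPlayer(data: wavData)   // decodes the wav data directly from memory
    player?.prepareToPlay()
    player?.play()
}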
