I created a custom form that also includes a dynamic sub-form. For example, the custom form consists of three sections:
Parent form.
Attachment control.
Sub-form with add/remove buttons for creating multiple sub-form entries.
Here is how my script adds the data: the parent form gets submitted and returns the item ID using JSOM; based on that item ID, the attachments are added to the parent item and the sub-form data is added to another list. But sometimes I am facing a conflict issue while adding the attachments. Here is the code:
if (flag == true) {
oLoader = SP.UI.ModalDialog.showWaitScreenWithNoClose("Working on it", "Creating a new Request...");
var data = [];
var fileArray = [];
$("#attachFilesContainer input:file").each(function () {
if ($(this)[0].files[0]) {
fileArray.push({ "Attachment": $(this)[0].files[0] });
}
});
arraycount += fileArray.length;
data.push({
"Column_x0020_Name": $("#txtAccountNumber").val(),
"Warehouse_x0020_Code": $("#wareHousedrpdown option:selected").text(),
"Facility": $("#Facilitydrpdown option:selected").text(),
"Internal_x002f_External": $("#InternalExteralDrpdown :selected").text(),
"Requested_x0020_Completion_x0020": newReqDate,//$("#txtRequestedCompletionDate").datepicker('getDate').format('MM/dd/yyyy'), //$("#txtRequestedCompletionDate").val(),
"Account_x0020_Management_x0020_A": AccountName,
"Quote_x0020_Required_x003f_": $("#drpQuoteRequired :selected").text(),
"Files": fileArray
});
createItemWithAttachments("Parent", data).then(
function () {
oLoader.close();
window.location.replace(_spPageContextInfo.webAbsoluteUrl + "/Lists/Parent/AllItems.aspx");
//if (oLoader.close) setTimeout(function () { oLoader.close(); window.location.replace(_spPageContextInfo.webAbsoluteUrl + "/Lists/Test/AllItems.aspx"); }, 3000);
//alert('Item created with Multiple attachments');
},
function (sender, args) {
console.log('Error occured' + args.get_message());
}
)
//oLoader.close();
//window.location.replace(_spPageContextInfo.webAbsoluteUrl + "/Lists/Parent/AllItems.aspx");
}
function createSubformItem(listName,i) {
var listItem = {
__metadata: { "type": "SP.Data.SubFormListItem" },
"ParentID": id,
"Start_x0020_SKU":$("input[id='txtStartSKU" + i + "']").val(),
"Qty_x0020_Requested":$("input[id='txtQtyRequested" + i + "']").val(),
"UOM":$("#UOMdrpdown" + i + " option:selected").val(),
"SSRType":$("#SSRTypedrpdown" + i + " option:selected").val()!="null" ? { "__metadata": { "type": "Collection(Edm.String)" }, "results": $("#SSRTypedrpdown"+i+"").val() } : { "__metadata": { "type": "Collection(Edm.String)" }, "results": [""] },
"Hold_x0020_Type":$("#SSRHoldTypedrpdown" + i + " option:selected").val(),
"End_x0020_SKU":$("input[id='txtEndSKU" + i + "']").val(),
"Billing_x0020_UOM":$("#BillingUOMdrpdown" + i + " option:selected").val(),
"Price_x0020_per_x0020_UOM":$("input[id='txtPricePerUOM" + i + "']").val(),
"Instructions":$("textarea[title='Instructions" + i + "']").val(),
};
return $.ajax({
url:"http://devapp/app/_api/web/lists/getbytitle('SubForm')/items",
type: "POST",
contentType: "application/json;odata=verbose",
data: JSON.stringify(listItem),
headers: {
"Accept": "application/json;odata=verbose",
"X-RequestDigest": $("#__REQUESTDIGEST").val()
}
});
}
var createItemWithAttachments = function (listName, listValues) {
var fileCountCheck = 0;
var fileNames;
var context = new SP.ClientContext.get_current();
var dfd = $.Deferred();
var targetList = context.get_web().get_lists().getByTitle(listName);
context.load(targetList);
var singleUser = listValues[0].Account_x0020_Management_x0020_A != "" ? SP.FieldUserValue.fromUser(listValues[0].Account_x0020_Management_x0020_A) : null;
var itemCreateInfo = new SP.ListItemCreationInformation();
var listItem = targetList.addItem(itemCreateInfo);
listItem.set_item("Account_x0020_Number", listValues[0].Account_x0020_Number);
listItem.set_item("Warehouse_x0020_Code", listValues[0].Warehouse_x0020_Code);
listItem.set_item("Facility", listValues[0].Facility);
listItem.set_item("Internal_x002f_External", listValues[0].Internal_x002f_External);
listItem.set_item("Requested_x0020_Completion_x0020", listValues[0].Requested_x0020_Completion_x0020);
listItem.set_item("Account_x0020_Management_x0020_A", singleUser);
listItem.set_item("Quote_x0020_Required_x003f_", listValues[0].Quote_x0020_Required_x003f_);
listItem.update();
for (i = 0; i <= count; i++)
{
createSubformItem("SubForm",i);
}
context.executeQueryAsync(
function () {
id = listItem.get_id();
if (listValues[0].Files.length != 0) {
if (fileCountCheck <= listValues[0].Files.length - 1) {
loopFileUpload(listName, id, listValues, fileCountCheck).then(
function () {
},
function (sender, args) {
console.log("Error uploading");
dfd.reject(sender, args);
}
);
}
}
else {
dfd.resolve(fileCountCheck);
}
},
function (sender, args) {
console.log('Error occured' + args.get_message());
}
);
return dfd.promise();
}
/*End of */
function loopFileUpload(listName, id, listValues, fileCountCheck) {
var dfd = $.Deferred();
uploadFile(listName, id, listValues[0].Files[fileCountCheck].Attachment).then(
function (data) {
var objcontext = new SP.ClientContext();
var targetList = objcontext.get_web().get_lists().getByTitle(listName);
var listItem = targetList.getItemById(id);
objcontext.load(listItem);
objcontext.executeQueryAsync(function () {
console.log("Reload List Item- Success");
fileCountCheck++;
if (fileCountCheck <= listValues[0].Files.length - 1) {
loopFileUpload(listName, id, listValues, fileCountCheck);
} else {
console.log(fileCountCheck + ": Files uploaded");
attcount += fileCountCheck;
if (arraycount == attcount) {
for (i = 0; i <= count; i++)
{
createSubformItem("SubForm",i);
}
oLoader.close();
window.location.replace(_spPageContextInfo.webAbsoluteUrl + "/Lists/ParentList/AllItems.aspx");
}
}
},
function (sender, args) {
console.log("Reload List Item- Fail" + args.get_message());
});
},
function (sender, args) {
console.log("Not uploaded");
dfd.reject(sender, args);
}
);
return dfd.promise();
}
function uploadFile(listName, id, file) {
var deferred = $.Deferred();
var fileName = file.name;
getFileBuffer(file).then(
function (buffer) {
var bytes = new Uint8Array(buffer);
var binary = '';
for (var b = 0; b < bytes.length; b++) {
binary += String.fromCharCode(bytes[b]);
}
var scriptbase = _spPageContextInfo.webServerRelativeUrl + "/_layouts/15/";
console.log(' File size:' + bytes.length);
$.getScript(scriptbase + "SP.RequestExecutor.js", function () {
var createitem = new SP.RequestExecutor(_spPageContextInfo.webServerRelativeUrl);
createitem.executeAsync({
url: _spPageContextInfo.webServerRelativeUrl + "/_api/web/lists/GetByTitle('" + listName + "')/items(" + id + ")/AttachmentFiles/add(FileName='" + file.name + "')",
method: "POST",
binaryStringRequestBody: true,
body: binary,
success: fsucc,
error: ferr,
state: "Update"
});
function fsucc(data) {
console.log(data + ' uploaded successfully');
deferred.resolve(data);
}
function ferr(data) {
console.log(fileName + "not uploaded error");
deferred.reject(data);
}
});
},
function (err) {
deferred.reject(err);
}
);
return deferred.promise();
}
function getFileBuffer(file) {
var deferred = $.Deferred();
var reader = new FileReader();
reader.onload = function (e) {
deferred.resolve(e.target.result);
}
reader.onerror = function (e) {
deferred.reject(e.target.error);
}
reader.readAsArrayBuffer(file);
return deferred.promise();
}
The issue is that you upload another attachment before SharePoint has finished processing the item, which happens with larger files. So when you try to perform another operation on the item (adding another attachment, etc.), a race condition is reached and SharePoint throws the error. When the attachment files are smaller, the processing has time to complete before you start the next upload.
You need to find a way to check whether the item has completed its processing. One way of doing this might be to do a GET, check the item's ETag, and ensure that it has incremented the correct number of times before sending another POST.
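For example, a minimal sketch of that ETag check, assuming the same /_api endpoint used elsewhere in the code; getItemEtag, waitForEtag, and the 500 ms polling interval are illustrative names and values, not part of the original code:

function getItemEtag(listName, itemId) {
    // Read the item's current ETag from the verbose REST response
    return $.ajax({
        url: _spPageContextInfo.webAbsoluteUrl + "/_api/web/lists/getbytitle('" + listName + "')/items(" + itemId + ")",
        type: "GET",
        headers: { "Accept": "application/json;odata=verbose" }
    }).then(function (data) {
        // The verbose ETag looks like "\"3\"" - strip the quotes and parse the number
        return parseInt(data.d.__metadata.etag.replace(/"/g, ""), 10);
    });
}

function waitForEtag(listName, itemId, expectedEtag) {
    var dfd = $.Deferred();
    (function poll() {
        getItemEtag(listName, itemId).then(function (etag) {
            if (etag >= expectedEtag) {
                dfd.resolve(etag);       // item has caught up - safe to send the next POST
            } else {
                setTimeout(poll, 500);   // not there yet, try again shortly
            }
        }, dfd.reject);
    })();
    return dfd.promise();
}

In loopFileUpload you could then wait for waitForEtag(listName, id, expectedVersion) to resolve before calling uploadFile for the next attachment.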
I'm having a problem with my function Mrequest. The problem is that data like the id and the year are not added to the array. I know it's a problem with the function, but I just can't solve it.
Any idea what I could change so that my result array gets the ID and the YEAR?
function getContent() {
var result = [];
async.series([
getDb,
getInfos
]);
function getDb(done) {
//posta
var query = "SELECT title , launch_year FROM content WHERE content_genre_id=1 && content_type_id!=2 LIMIT 2;"
mysqlConnection.query(query, function(err, data) {
result = data;
async.each(result, getPelicula, done);
});
}
function Mrequest(pagina, callback){
request({
url: pagina,
method: "GET",
json: true,
}, callback);
}
function getPelicula(pelicula, donePelicula) {
var peli = pelicula.title;
var pagina = "http://api.themoviedb.org/3/search/movie?query=" + peli + "&api_key=3e2709c4c051b07326f1080b90e283b4&language=en=ES&page=1&include_adult=false"
setTimeout(function() {
Mrequest(pagina, function(error, res, body) {
if (error) {
console.log("error", error);
} else {
var control = body.results.length;
if (control > 0) {
var year_base = pelicula.launch_year;
var id = body.results[0].id;
var year = body.results[0].release_date;
var d = new Date(year);
var year_solo = d.getFullYear();
console.log(pelicula);
console.log("id",id);
console.log("year",year);
console.log("year",year_solo);
if (year_base == year_solo) {
pelicula.id = id;
pelicula.year_pagina = year_solo;
} else {
pelicula.id = -1;
pelicula.year_pagina = null;
}
}
}
});
}, result.indexOf(pelicula) * 3000);
donePelicula();
}
getContent();
}
It doesn't look like you are ever making the request, because getContent is only being called from within itself.
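A minimal sketch of that restructuring (the donePelicula placement inside the callback is an extra assumption on my part, not something spelled out above; apiKey stands in for the key from the question):

// getPelicula as it would live inside getContent - Mrequest, result and async
// come from the surrounding scope, exactly as in the question.
function getPelicula(pelicula, donePelicula) {
    var pagina = "http://api.themoviedb.org/3/search/movie?query=" + pelicula.title +
        "&api_key=" + apiKey + "&page=1&include_adult=false";

    setTimeout(function () {
        Mrequest(pagina, function (error, res, body) {
            if (!error && body.results && body.results.length > 0) {
                var first = body.results[0];
                var yearFromApi = new Date(first.release_date).getFullYear();
                if (pelicula.launch_year == yearFromApi) {
                    pelicula.id = first.id;
                    pelicula.year_pagina = yearFromApi;
                } else {
                    pelicula.id = -1;
                    pelicula.year_pagina = null;
                }
            }
            donePelicula();   // only mark this item done after the data has been attached
        });
    }, result.indexOf(pelicula) * 3000);
}

// ...and invoke the whole thing once, from outside the function:
getContent();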
I'm using the code from netsniff.js to generate a HAR file, and I want to improve it to generate a HAR file from multiple links given in an array (named links in the code below).
There is another question here, Using Multiple page.open in Single Script, that might help me, but I have no idea how to implement the given solution in my code.
Below is my code (it logs "FAIL to load the address" to the output file if the links array contains more than one item):
"use strict";
if (!Date.prototype.toISOString) {
Date.prototype.toISOString = function () {
function pad(n) { return n < 10 ? '0' + n : n; }
function ms(n) { return n < 10 ? '00'+ n : n < 100 ? '0' + n : n }
return this.getFullYear() + '-' +
pad(this.getMonth() + 1) + '-' +
pad(this.getDate()) + 'T' +
pad(this.getHours()) + ':' +
pad(this.getMinutes()) + ':' +
pad(this.getSeconds()) + '.' +
ms(this.getMilliseconds()) + 'Z';
}
}
var entries = [];
function createHAR(address, title, startTime, resources)
{
resources.forEach(function (resource) {
var request = resource.request,
startReply = resource.startReply,
endReply = resource.endReply;
if (!request || !startReply || !endReply) {
return;
}
// Exclude Data URI from HAR file because
// they aren't included in specification
if (request.url.match(/(^data:image\/.*)/i)) {
return;
}
entries.push({
startedDateTime: request.time.toISOString(),
time: endReply.time - request.time,
request: {
method: request.method,
url: request.url,
httpVersion: "HTTP/1.1",
cookies: [],
headers: request.headers,
queryString: [],
headersSize: -1,
bodySize: -1
},
response: {
status: endReply.status,
statusText: endReply.statusText,
httpVersion: "HTTP/1.1",
cookies: [],
headers: endReply.headers,
redirectURL: "",
headersSize: -1,
bodySize: startReply.bodySize,
content: {
size: startReply.bodySize,
mimeType: endReply.contentType
}
},
cache: {},
timings: {
blocked: 0,
dns: -1,
connect: -1,
send: 0,
wait: startReply.time - request.time,
receive: endReply.time - startReply.time,
ssl: -1
},
pageref: address
});
});
return {
log: {
version: '1.2',
creator: {
name: "PhantomJS",
version: phantom.version.major + '.' + phantom.version.minor +
'.' + phantom.version.patch
},
pages: [{
startedDateTime: startTime.toISOString(),
id: address,
title: title,
pageTimings: {
onLoad: page.endTime - page.startTime
}
}],
entries: entries
}
};
}
var page = require('webpage').create()
var fs = require('fs');
var count = 0;
function processSites(links)
{
page.address = links.pop();
var path = 'file' + count + '.har';
page.resources = [];
console.log("page resources:", page.resources)
count = count + 1;
page.onLoadStarted = function () {
page.startTime = new Date();
};
page.onResourceRequested = function (req) {
page.resources[req.id] = {
request: req,
startReply: null,
endReply: null
};
};
page.onResourceReceived = function (res) {
if (res.stage === 'start') {
page.resources[res.id].startReply = res;
}
if (res.stage === 'end') {
page.resources[res.id].endReply = res;
}
};
page.open(page.address, function (status) {
var har;
setTimeout(function () {
if (status !== 'success') {
console.log('FAIL to load the address');
phantom.exit(1);
} else {
page.endTime = new Date();
page.title = page.evaluate(function () {
return document.title;
});
entries = [];
har = createHAR(page.address, page.title, page.startTime, page.resources);
// console.log(JSON.stringify(har, undefined, 4));
fs.write(path, JSON.stringify(har), 'w');
if(links.length > 0)
{
processSites(links);
}
else
{
phantom.exit();
}
}
}, 10000);
});
}
var links = ["http://stackoverflow.com", "http://marvel.com"];
processSites(links);
Update:
The above code generates two HAR files, file1.har and file2.har, but the second HAR file also contains the entries generated from both links, when it should only contain the entries for its own link...
Fixed this by setting var har = " "
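For reference, the accumulation comes from the module-level entries array that createHAR pushes into on every call; the updated code above clears it with entries = [] right before building each HAR, roughly like this:

// entries is shared across pages, so reset it before building each HAR
// so that each file only contains the current page's requests.
entries = [];
har = createHAR(page.address, page.title, page.startTime, page.resources);
fs.write(path, JSON.stringify(har), 'w');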
You can't iterate over pages in PhantomJS with a simple loop, because the page.open method is asynchronous: it doesn't wait for the first site to be processed before opening the second one.
I've rewritten your script to use recursion: the next site is opened only after the current one has been processed. (Note: if any of the sites in the queue fails to load, the whole process will halt, but you can easily rewrite the script to avoid that; see the sketch after the code below.)
if (!Date.prototype.toISOString) {
Date.prototype.toISOString = function () {
// ...
}
}
var entries = [];
function createHAR(address, title, startTime, resources)
{
// ...
}
var page = require('webpage').create()
function processSites(links)
{
page.address = links.pop();
console.log("PAGE ADDRESS: ", page.address);
page.resources = [];
page.onLoadStarted = function () {
page.startTime = new Date();
};
page.onResourceRequested = function (req) {
page.resources[req.id] = {
request: req,
startReply: null,
endReply: null
};
};
page.onResourceReceived = function (res) {
if (res.stage === 'start') {
page.resources[res.id].startReply = res;
}
if (res.stage === 'end') {
page.resources[res.id].endReply = res;
}
};
page.open(page.address, function (status) {
var har;
setTimeout(function () {
if (status !== 'success') {
console.log('FAIL to load the address');
phantom.exit(1);
} else {
page.endTime = new Date();
page.title = page.evaluate(function () {
return document.title;
});
har = createHAR(page.address, page.title, page.startTime, page.resources);
console.log(JSON.stringify(har, undefined, 4));
if(links.length > 0)
{
processSites(links);
}
else
{
phantom.exit();
}
}
}, 10000);
});
}
var links = ["http://edition.cnn.com", "http://stackoverflow.com"];
processSites(links);
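And here is a minimal sketch of the tolerant variant mentioned above (my own suggestion, not part of the original answer): skip a failed address and continue with the remaining links instead of exiting.

page.open(page.address, function (status) {
    setTimeout(function () {
        if (status !== 'success') {
            console.log('FAIL to load ' + page.address + ', skipping');
        } else {
            page.endTime = new Date();
            page.title = page.evaluate(function () { return document.title; });
            console.log(JSON.stringify(
                createHAR(page.address, page.title, page.startTime, page.resources),
                undefined, 4));
        }
        // Continue with the remaining links whether or not this one loaded
        if (links.length > 0) {
            processSites(links);
        } else {
            phantom.exit();
        }
    }, 10000);
});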
C#/MVC/Knockout/JSON
I've got the following javascript:
function Feed(data) {
this.ID = ko.observable(data.ID);
this.RSSName = ko.observable(data.RSSName);
alert(data.RSSName + " " + data.ID);
}
function ViewModel() {
self = this;
self.CurrentFeeds = ko.observableArray([]);
self.isLoading = ko.observable(false);
self.StatusMessage = ko.observable("Loading");
$.ajax({
type: "GET",
url: '#Url.Action("RSSList", "RSS")',
success: function (data) {
var feeds = $.map(data, function (item) {
alert(item.RSSName + " " + item.ID + " 1");
return new Feed(item)
});
self.CurrentFeeds(feeds);
//self.CurrentFeeds(data);
},
error: function (err) {
alert(err.status + " : " + err.statusText);
}
});
self.save = function () {
self.deleteFeed = function (feed) {
};
};
}
The JSON response (as copied from fiddler) looks like this:
{"aaData":[{"ID":"0","RSSName":"Most Recent"},{"ID":"1","RSSName":"Website feed"}]}
Controller:
public JsonResult RSSList()
{
var query = (from t in db.tblRSSFeeds
select new ViewModels.RSSList()
{
ID = t.pkID.ToString(),
RSSName = t.szFeedName
}).OrderBy( t => t.RSSName).ToList();
var recent = new ViewModels.RSSList();
recent.ID = "0";
recent.RSSName = "Most Recent";
query.Insert(0, recent);
return Json( query, JsonRequestBehavior.AllowGet);
}
I'm thinking my issue has to do with the Feed(data) function, in that it's only passing back one record. I tried setting self.CurrentFeeds(data) as well, with no luck. The alerts shown above show undefined, but I can see the data coming down in Fiddler...
For some reason the success function isn't seeing the data correctly to create the array. Why is this?
If that is the response:
{"aaData":[{"ID":"0","RSSName":"Most Recent"},{"ID":"1","RSSName":"Website feed"}]}
Change the success callback to:
$.ajax({
type: "GET",
url: '#Url.Action("RSSList", "RSS")',
success: function (data) {
var feeds = $.map(data.aaData, function (item) {
alert(item.RSSName + " " + item.ID + " 1");
return new Feed(item)
});
self.CurrentFeeds(feeds);
},
error: function (err) {
alert(err.status + " : " + err.statusText);
}
});
And I believe this works, because you are trying to map an object, not an array, so you must map over aaData, which is the actual array.
Hi, I'm relatively new to JavaScript, and I'm working on a WinJS app project where I want to use the Bing image search data source example to virtualize the data source of a ListView.
My problem is understanding how the asynchronous functions work together and how to implement an async XHR request within the existing one.
Currently I'm using a synchronous request, but I would like to change that into an asynchronous one.
This is my data adapter:
(function () {
var xxxDataAdapter = WinJS.Class.define(
function (devkey, query, delay) {
this._minPageSize = 2;
this._maxPageSize = 5;
this._maxCount = 50;
this._devkey = devkey;
this._query = query;
this._delay = 0;
},
{
getCount: function () {
var that = this;
var requestStr = 'http://xxx/' + that._query;
return WinJS.xhr({ url: requestStr, type: "GET", /*user: "foo", password: that._devkey,*/ }).then(
function (request) {
var obj = JSON.parse(request.responseText);
if (typeof obj.error === "undefined") {
var count = obj.length;
if (count === 0) { console.log("The search returned 0 results.", "sample", "error"); }
return count;
} else {
console.log("Error fetching results from API", "sample", "error");
return 0;
}
},
function (request) {
if (request && request.name === "Canceled") {
return WinJS.Promise.wrapError(request);
} else {
if (request.status === 401) {
console.log(request.statusText, "sample", "error");
} else {
console.log("Error fetching data from the service. " + request.responseText, "sample", "error");
}
return 0;
}
});
},
itemsFromIndex: function (requestIndex, countBefore, countAfter)
{
var that = this;
if (requestIndex >= that._maxCount) {
return WinJS.Promise.wrapError(new WinJS.ErrorFromName(WinJS.UI.FetchError.doesNotExist));
}
var fetchSize, fetchIndex;
if (countBefore > countAfter) {
//Limit the overlap
countAfter = Math.min(countAfter, 0);
//Bound the request size based on the minimum and maximum sizes
var fetchBefore = Math.max(Math.min(countBefore, that._maxPageSize - (countAfter + 1)), that._minPageSize - (countAfter + 1));
fetchSize = fetchBefore + countAfter + 1;
fetchIndex = requestIndex - fetchBefore;
} else {
countBefore = Math.min(countBefore, 10);
var fetchAfter = Math.max(Math.min(countAfter, that._maxPageSize - (countBefore + 1)), that._minPageSize - (countBefore + 1));
fetchSize = countBefore + fetchAfter + 1;
fetchIndex = requestIndex - countBefore;
}
var requestStr = 'http://xxx/' + that._query;
return WinJS.xhr({ url: requestStr, type: "GET", /*user: "foo", password: that._devkey,*/ }).then(
function (request)
{
var results = [], count;
var obj = JSON.parse(request.responseText);
if (typeof obj.error === "undefined")
{
var items = obj;
for (var i = 0, itemsLength = items.length; i < itemsLength; i++)
{
var dataItem = items[i];
var req = new XMLHttpRequest();
// false = synchronous
req.open("get", "http://xxxxx/" + dataItem.id, false);
req.send();
var jobj = JSON.parse(req.response);
if (typeof jobj.error === "undefined")
{
results.push({
key: (fetchIndex + i).toString(),
data: {
title: jobj.name.normal,
date: Date.jsonFormat(dataItem.calculatedAt, "Do, MMM HH:mm Z"),
result: "",
status: "",
}
});
}
}
return {
items: results, // The array of items
offset: requestIndex - fetchIndex, // The offset into the array for the requested item
};
} else {
console.log(request.statusText, "sample", "error");
return WinJS.Promise.wrapError(new WinJS.ErrorFromName(WinJS.UI.FetchError.doesNotExist));
}
},
function (request)
{
if (request.status === 401) {
console.log(request.statusText, "sample", "error");
} else {
console.log("Error fetching data from the service. " + request.responseText, "sample", "error");
}
return WinJS.Promise.wrapError(new WinJS.ErrorFromName(WinJS.UI.FetchError.noResponse));
}
);
}
});
WinJS.Namespace.define("xxx", {
datasource: WinJS.Class.derive(WinJS.UI.VirtualizedDataSource, function (devkey, query, delay) {
this._baseDataSourceConstructor(new xxxDataAdapter(devkey, query, delay));
})
});
})();
And this is the synchronous request I would like to change to an asynchronous one:
var req = new XMLHttpRequest();
// false = synchronous
req.open("get", "http://xxxxx/" + dataItem.id, false);
req.send();
You can use the then function to chain promises. In your scenario, the then callback simply needs to have an if statement.
return WinJS.xhr(params).then(function (req)
{
if (..)
return WinJS.xhr(params2);
else
return; // then function ensures wrapping your sync result in a completed promise
}, function onerror(e)
{
// todo - error handling code e.g. showing a message box based on your app requirement
});
This is what I came up with: map the JSON objects received asynchronously and make another asynchronous call for each object to get additional data. The nested async calls are then joined, and the combined result is returned when all of them have finished.
return WinJS.xhr({ url: 'http://xxx=' + that._query }).then(function (request) {
var results = [];
var obj = JSON.parse(request.responseText);
var xhrs = obj.map(function (dataItem, index) {
return WinJS.xhr({ url: 'http://xxxx' + dataItem.attrx }).then(
function completed(nestedRequest) {
var xxJobj = JSON.parse(nestedRequest.responseText);
var dataObj = {};
dataObj.title = xxJobj.name;
dataObj.date = Date.jsonFormat(dataItem.attrtrxx, "Do, MMM HH:mm Z");
dataObj.result = "open";
dataObj.status = "foo";
if (dataItem.xx.hasOwnProperty("attrx5")) {
dataObj.opponent = dataItem.attrx4;
} else {
dataObj.opponent = dataItem.attrx3;
}
dataObj.page_title = "xXx";
dataObj.match_id = dataItem.id;
dataObj.type = "largeListIconTextItem";
dataObj.bg_image = "http://xxx/" + xxJobj.attrx2 + "-portrait.jpg";
results.push({
key: (fetchIndex + index).toString(),
data: dataObj
});
},
function (err) {
console.log(err.status);
console.log(err.responseText);
}
);
});
return WinJS.Promise.join(xhrs).then(
function (promises) {
return {
items: results, // The array of items
offset: requestIndex - fetchIndex, // The offset into the array for the requested item
};
},
function (err) {
console.log(JSON.stringify(err));
}
);
});
I have an input text box that fires an AJAX call each time the user enters data into it. I'm using Bootstrap typeahead. The problem is that when I enter the letter "a", the jQuery AJAX call fires and fetches the data, but the suggestion list is not populated. Then, when "aw" is entered, the data fetched for the letter "a" is what gets shown.
I have hosted the code here http://hakunalabs.appspot.com/chartPage
OK, so here is part of my HTML code:
<script type="text/javascript">
$(document).ready(function () {
$('#txt').keyup(function () {
delay(function () {
CallData();
}, 1000);
});
});
var delay = (function () {
var timer = 0;
return function (callback, ms) {
clearTimeout(timer);
timer = setTimeout(callback, ms);
};
})();
</script>
<input type="text" id="txt" runat="server" class="span4 typeahead local remote" placeholder="Search..." />
And here is my JavaScript code:
var DataProvider;
function CallData() {
DataProvider = [];
var vdata = $('#txt').val();
if (vdata != "") {
var urlt = "http://examples/search?keyword=" + vdata + "&callback=my_callback";
$.ajax({
type: "GET",
url: urlt,
jsonpCallback: "my_callback",
dataType: "jsonp",
async: false,
error: function (xhr, errorType, exception) {
var errorMessage = exception || xhr.statusText;
alert("Excep:: " + exception + "Status:: " + xhr.statusText);
}
});
}
}
function my_callback(data) {
var NameArray = new Array();
var descArray = new Array();
for (var i = 0; i < data.count; i++) {
NameArray.push(data.response[i].days_till_close + " Days Left | " + data.response[i].name + " | " + data.response[i].description);
}
for (var i = 0; i < data.count; i++) {
descArray.push(data.response[i].description);
}
DataProvider = [];
for (var i = 0; i < data.count; i++) {
var dataObject = { id: i + 1, name: NameArray[i], description: descArray[i] };
DataProvider.push(dataObject);
}
var vdata = $('#txt').val();
var urlp = "http://example.com/v1/members/search?keyword=" + vdata + "&my_callbackMember";
$.ajax({
type: "GET",
url: urlp,
jsonpCallback: "my_callbackMember",
dataType: "jsonp",
error: function (xhr, errorType, exception) {
var errorMessage = exception || xhr.statusText;
alert("Excep:: " + exception + "Status:: " + xhr.statusText);
}
});
}
function my_callbackMember(data) {
var memberArray = new Array();
for (var i = 0; i < data.count; i++) {
memberArray.push(data.response[i].name);
}
for (var i = 0; i < data.count; i++) {
var dataObject = { id: i + 1, name: memberArray[i] };
DataProvider.push(dataObject);
}
localStorage.setItem("name", JSON.stringify(DataProvider));
var sources = [
{ name: "local", type: "localStorage", key: "name", display: "country" }
];
$('input.typeahead.local.remote').typeahead({
sources: [{ name: "", type: "localStorage", key: "name", display: "name"}],
itemSelected: function (obj) { alert(obj); }
});
}
Your issue is that typeahead can only present the results that are already in localStorage at the moment you press a key. Because your results are fetched via AJAX, they only show up in localStorage a second or so AFTER you've pressed the key. Therefore, you will always see the results of the last successful AJAX request in your typeahead results.
Read the Bootstrap documentation for typeaheads (http://twitter.github.com/bootstrap/javascript.html#typeahead), specifically the section about "source". You can define a "process" callback via the arguments passed to your source function for asynchronous data sources.
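For instance, a minimal sketch of that approach, assuming the stock Bootstrap typeahead (where source can be a function receiving query and process) rather than the localStorage-based plugin options used in the question; the URL and field names are copied from the question and may need adjusting:

$('#txt').typeahead({
    source: function (query, process) {
        $.ajax({
            type: "GET",
            url: "http://examples/search?keyword=" + encodeURIComponent(query),
            dataType: "jsonp",
            success: function (data) {
                // Build the display strings and hand them to typeahead only
                // once the data has actually arrived.
                var names = $.map(data.response, function (item) {
                    return item.days_till_close + " Days Left | " + item.name + " | " + item.description;
                });
                process(names);
            }
        });
    }
});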